[ 520.749770] env[62740]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=62740) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 520.750174] env[62740]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=62740) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 520.750215] env[62740]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=62740) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 520.750564] env[62740]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 520.838557] env[62740]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=62740) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 520.847930] env[62740]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.009s {{(pid=62740) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 520.891128] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-900838bb-ce6b-4e8b-9d5b-36d7c2772ce6 None None] Creating reply queue: reply_30cb6e3d754a4ebf9cedab7950709402
[ 520.899654] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-900838bb-ce6b-4e8b-9d5b-36d7c2772ce6 None None] Expecting reply to msg 4198bb63f20c4ac486635409b51acc7a in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 520.914301] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4198bb63f20c4ac486635409b51acc7a
[ 520.984120] env[62740]: INFO nova.virt.driver [None req-900838bb-ce6b-4e8b-9d5b-36d7c2772ce6 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 521.058125] env[62740]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 521.058301] env[62740]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.001s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 521.058405] env[62740]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=62740) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 524.255192] env[62740]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-1af47d7b-62f2-49c1-8eb8-abf8e473be34 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 524.272568] env[62740]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=62740) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 524.272726] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-36d1d02c-1b4e-4358-8856-dc75f25e89fd {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 524.298292] env[62740]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 5b8d2.
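
The plugin-load and subprocess entries above come from library calls rather than hand-written logging. A minimal sketch of what plausibly emits them, assuming standard os-vif and oslo.concurrency usage (the exact call sites in nova are not visible in this log):

import os_vif
from oslo_concurrency import processutils

# Scans the 'os_vif' entry-point namespace and logs one DEBUG line per
# plugin it loads, followed by the INFO summary line.
os_vif.initialize()

# Logs the 'Running cmd (subprocess)' / 'CMD "..." returned: 0' pair around
# the child process; exit code 1 (no grep match) is tolerated for a probe.
out, err = processutils.execute('grep', '-F', 'node.session.scan',
                                '/sbin/iscsiadm', check_exit_code=[0, 1])
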
[ 524.298474] env[62740]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.240s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 524.299078] env[62740]: INFO nova.virt.vmwareapi.driver [None req-900838bb-ce6b-4e8b-9d5b-36d7c2772ce6 None None] VMware vCenter version: 7.0.3
[ 524.302615] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4dd8487-90b6-446a-b85c-368ae5ce47a1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 524.320781] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f65a4a0-933a-4742-a4c7-58ceb7b45002 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 524.327251] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbb491c9-1dc0-45d9-ac19-224af22e6bb4 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 524.334359] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0688b541-cd2e-4fd4-a7cc-d4a6c9fe61cd {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 524.348336] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3ee0879-cc5e-4b6f-8287-424c9ad9a9f6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 524.354986] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-655cf912-8d89-4c4b-9c82-b5906251b0fd {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 524.385875] env[62740]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-9bf7e50a-3b71-40fa-a2d1-8d7591c8d565 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 524.391489] env[62740]: DEBUG nova.virt.vmwareapi.driver [None req-900838bb-ce6b-4e8b-9d5b-36d7c2772ce6 None None] Extension org.openstack.compute already exists. {{(pid=62740) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:224}}
[ 524.394165] env[62740]: INFO nova.compute.provider_config [None req-900838bb-ce6b-4e8b-9d5b-36d7c2772ce6 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
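
The Acquiring / acquired / "released" triple around the vCenter login is the DEBUG trail of oslo.concurrency's synchronized decorator; the waited/held durations (0.001s and 3.240s above) bracket the wrapped call. A sketch under that assumption:

from oslo_concurrency import lockutils

@lockutils.synchronized('oslo_vmware_api_lock')
def _create_session():
    # Runs with the lock held; the 3.240s 'held' time above covers the suds
    # client setup and the SessionManager.Login round trip to vCenter.
    ...
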
[ 524.394830] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-900838bb-ce6b-4e8b-9d5b-36d7c2772ce6 None None] Expecting reply to msg bc920fe902ea49cfb6cafb6fbeec6dc1 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 524.413816] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc920fe902ea49cfb6cafb6fbeec6dc1
[ 524.414705] env[62740]: DEBUG nova.context [None req-900838bb-ce6b-4e8b-9d5b-36d7c2772ce6 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),4df378ef-afdc-4cc9-8d33-af664f17a763(cell1) {{(pid=62740) load_cells /opt/stack/nova/nova/context.py:464}}
[ 524.416710] env[62740]: DEBUG oslo_concurrency.lockutils [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 524.416922] env[62740]: DEBUG oslo_concurrency.lockutils [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 524.417600] env[62740]: DEBUG oslo_concurrency.lockutils [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 524.418032] env[62740]: DEBUG oslo_concurrency.lockutils [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] Acquiring lock "4df378ef-afdc-4cc9-8d33-af664f17a763" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 524.418249] env[62740]: DEBUG oslo_concurrency.lockutils [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] Lock "4df378ef-afdc-4cc9-8d33-af664f17a763" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 524.419271] env[62740]: DEBUG oslo_concurrency.lockutils [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] Lock "4df378ef-afdc-4cc9-8d33-af664f17a763" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 524.439342] env[62740]: INFO dbcounter [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] Registered counter for database nova_cell0
[ 524.447377] env[62740]: INFO dbcounter [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] Registered counter for database nova_cell1
[ 524.450315] env[62740]: DEBUG oslo_db.sqlalchemy.engines [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62740) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
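
The load_cells entry and the per-cell-UUID locks above are nova caching one database connection per cell. A rough sketch of the consumer-side API, assuming nova's context helpers behave as their names suggest (not a verbatim excerpt of nova):

from nova import context as nova_context
from nova import objects

ctxt = nova_context.get_admin_context()
# cell1's UUID as discovered in the 'Found 2 cells' entry above
cell = objects.CellMapping.get_by_uuid(
    ctxt, '4df378ef-afdc-4cc9-8d33-af664f17a763')
with nova_context.target_cell(ctxt, cell) as cctxt:
    # Queries made through cctxt go to cell1's database; the engine is
    # created once and cached behind the lock named after the cell UUID.
    pass
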
[ 524.450673] env[62740]: DEBUG oslo_db.sqlalchemy.engines [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62740) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 524.455123] env[62740]: DEBUG dbcounter [-] [62740] Writer thread running {{(pid=62740) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 524.455838] env[62740]: DEBUG dbcounter [-] [62740] Writer thread running {{(pid=62740) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 524.458156] env[62740]: ERROR nova.db.main.api [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 524.458156] env[62740]: result = function(*args, **kwargs)
[ 524.458156] env[62740]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 524.458156] env[62740]: return func(*args, **kwargs)
[ 524.458156] env[62740]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 524.458156] env[62740]: result = fn(*args, **kwargs)
[ 524.458156] env[62740]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 524.458156] env[62740]: return f(*args, **kwargs)
[ 524.458156] env[62740]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 524.458156] env[62740]: return db.service_get_minimum_version(context, binaries)
[ 524.458156] env[62740]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 524.458156] env[62740]: _check_db_access()
[ 524.458156] env[62740]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 524.458156] env[62740]: stacktrace = ''.join(traceback.format_stack())
[ 524.458156] env[62740]:
[ 524.458901] env[62740]: ERROR nova.db.main.api [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 524.458901] env[62740]: result = function(*args, **kwargs)
[ 524.458901] env[62740]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 524.458901] env[62740]: return func(*args, **kwargs)
[ 524.458901] env[62740]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 524.458901] env[62740]: result = fn(*args, **kwargs)
[ 524.458901] env[62740]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 524.458901] env[62740]: return f(*args, **kwargs)
[ 524.458901] env[62740]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 524.458901] env[62740]: return db.service_get_minimum_version(context, binaries)
[ 524.458901] env[62740]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 524.458901] env[62740]: _check_db_access()
[ 524.458901] env[62740]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 524.458901] env[62740]: stacktrace = ''.join(traceback.format_stack())
[ 524.458901] env[62740]:
[ 524.459292] env[62740]: WARNING nova.objects.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
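
The two ERROR blocks above (one per cell) are the guard in nova.db.main.api firing: nova-compute forbids direct database access, and the wrapper logs the offending stack before the caller falls back, which is why they are followed by 'Failed to get minimum service version' WARNINGs rather than a crash. A simplified sketch of the pattern, not nova's exact code:

import logging
import traceback

LOG = logging.getLogger(__name__)
DISABLE_DB_ACCESS = True  # flipped on by the nova-compute entry point


class DBNotAllowed(Exception):
    pass


def _check_db_access():
    if DISABLE_DB_ACCESS:
        # Log the full stack of whoever tried to reach the DB, then refuse;
        # the caller catches this and degrades gracefully.
        stacktrace = ''.join(traceback.format_stack())
        LOG.error('No DB access allowed in nova-compute: %s', stacktrace)
        raise DBNotAllowed()
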
[ 524.459404] env[62740]: WARNING nova.objects.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] Failed to get minimum service version for cell 4df378ef-afdc-4cc9-8d33-af664f17a763
[ 524.459810] env[62740]: DEBUG oslo_concurrency.lockutils [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] Acquiring lock "singleton_lock" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 524.459971] env[62740]: DEBUG oslo_concurrency.lockutils [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] Acquired lock "singleton_lock" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 524.460228] env[62740]: DEBUG oslo_concurrency.lockutils [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] Releasing lock "singleton_lock" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 524.460553] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] Full set of CONF: {{(pid=62740) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}}
[ 524.460700] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] ******************************************************************************** {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2600}}
[ 524.460829] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] Configuration options gathered from: {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2601}}
[ 524.460965] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 524.461173] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2603}}
[ 524.461306] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] ================================================================================ {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2605}}
[ 524.461520] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] allow_resize_to_same_host = True {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.461690] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] arq_binding_timeout = 300 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.461823] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] backdoor_port = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
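
Everything from 'Full set of CONF' to the end of this capture is oslo.config dumping every registered option at DEBUG level. A minimal reproduction of that mechanism:

import logging
import sys

from oslo_config import cfg

CONF = cfg.CONF
# Parses the --config-file arguments listed in 'command line args' above.
CONF(sys.argv[1:], project='nova')
# Prints every registered option, defaults included, masking secret values
# (transport_url, cache.memcache_password, ...) as ****.
CONF.log_opt_values(logging.getLogger(__name__), logging.DEBUG)
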
[ 524.461951] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] backdoor_socket = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.462162] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] block_device_allocate_retries = 60 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.462339] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] block_device_allocate_retries_interval = 3 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.462530] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cert = self.pem {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.462704] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.462875] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] compute_monitors = [] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.463055] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] config_dir = [] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.463236] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] config_drive_format = iso9660 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.463373] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.463537] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] config_source = [] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.463705] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] console_host = devstack {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.463873] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] control_exchange = nova {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.464045] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cpu_allocation_ratio = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.464214] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] daemon = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.464385] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] debug = True {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.464546] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] default_access_ip_network_name = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.464713] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] default_availability_zone = nova {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.464870] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] default_ephemeral_format = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.465037] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] default_green_pool_size = 1000 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.465282] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.465450] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] default_schedule_zone = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.465609] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] disk_allocation_ratio = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.465773] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] enable_new_services = True {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.465950] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] enabled_apis = ['osapi_compute'] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.466153] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] enabled_ssl_apis = [] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.466333] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] flat_injected = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.466500] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] force_config_drive = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.466662] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] force_raw_images = True {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.466833] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] graceful_shutdown_timeout = 5 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.466999] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] heal_instance_info_cache_interval = 60 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.467230] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] host = cpu-1 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.467411] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.467577] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] initial_disk_allocation_ratio = 1.0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.467740] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] initial_ram_allocation_ratio = 1.0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.467954] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.468174] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] instance_build_timeout = 0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.468394] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] instance_delete_interval = 300 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.468579] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] instance_format = [instance: %(uuid)s] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.468753] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] instance_name_template = instance-%08x {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.468921] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] instance_usage_audit = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.469128] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] instance_usage_audit_period = month {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.469322] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.469491] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] instances_path = /opt/stack/data/nova/instances {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.469659] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] internal_service_availability_zone = internal {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.469819] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] key = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.469982] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] live_migration_retry_count = 30 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.470168] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] log_config_append = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.470340] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.470504] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] log_dir = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.470668] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] log_file = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.470800] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] log_options = True {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.470964] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] log_rotate_interval = 1 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.471184] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] log_rotate_interval_type = days {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.471370] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] log_rotation_type = none {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.471509] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.471681] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.471868] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.472063] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.472209] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.472379] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] long_rpc_timeout = 1800 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.472543] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] max_concurrent_builds = 10 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.472703] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] max_concurrent_live_migrations = 1 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.472863] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] max_concurrent_snapshots = 5 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.473087] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] max_local_block_devices = 3 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.473280] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] max_logfile_count = 30 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.473445] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] max_logfile_size_mb = 200 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.473607] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] maximum_instance_delete_attempts = 5 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.473780] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] metadata_listen = 0.0.0.0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
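
The logging_* options just listed are consumed by oslo.log, which also renders every line of this capture (the {{(pid=...) func file:line}} suffix on DEBUG lines is logging_debug_format_suffix). Typical wiring, as oslo.log documents it:

from oslo_config import cfg
from oslo_log import log as logging

CONF = cfg.CONF
logging.register_options(CONF)   # registers log_file, log_dir, logging_*, ...
logging.setup(CONF, 'nova')      # installs the format strings shown above
LOG = logging.getLogger(__name__)
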
[ 524.473948] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] metadata_listen_port = 8775 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.474134] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] metadata_workers = 2 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.474301] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] migrate_max_retries = -1 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.474470] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] mkisofs_cmd = genisoimage {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.474675] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] my_block_storage_ip = 10.180.1.21 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.474808] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] my_ip = 10.180.1.21 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.474969] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] network_allocate_retries = 0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.475164] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.475337] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] osapi_compute_listen = 0.0.0.0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.475537] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] osapi_compute_listen_port = 8774 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.475667] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] osapi_compute_unique_server_name_scope = {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.475835] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] osapi_compute_workers = 2 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.476011] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] password_length = 12 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.476179] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] periodic_enable = True {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.476343] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] periodic_fuzzy_delay = 60 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.476512] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] pointer_model = usbtablet {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.476679] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] preallocate_images = none {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.476842] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] publish_errors = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.476975] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] pybasedir = /opt/stack/nova {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.477176] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] ram_allocation_ratio = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.477353] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] rate_limit_burst = 0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.477525] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] rate_limit_except_level = CRITICAL {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.477687] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] rate_limit_interval = 0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.477847] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] reboot_timeout = 0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.478026] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] reclaim_instance_interval = 0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.478223] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] record = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.478449] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] reimage_timeout_per_gb = 60 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.478629] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] report_interval = 120 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.478796] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] rescue_timeout = 0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.478960] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] reserved_host_cpus = 0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.479136] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] reserved_host_disk_mb = 0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.479313] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] reserved_host_memory_mb = 512 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.479507] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] reserved_huge_pages = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.479673] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] resize_confirm_window = 0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.479837] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] resize_fs_using_block_device = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.480006] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] resume_guests_state_on_host_boot = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.480187] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.480353] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] rpc_response_timeout = 60 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.480519] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] run_external_periodic_tasks = True {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.480691] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] running_deleted_instance_action = reap {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.480854] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] running_deleted_instance_poll_interval = 1800 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.481028] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] running_deleted_instance_timeout = 0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.481226] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] scheduler_instance_sync_interval = 120 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.481404] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] service_down_time = 720 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.481577] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] servicegroup_driver = db {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.481740] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] shelved_offload_time = 0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.481901] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] shelved_poll_interval = 3600 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.482082] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] shutdown_timeout = 0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.482253] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] source_is_ipv6 = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.482414] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] ssl_only = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.482661] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.482829] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] sync_power_state_interval = 600 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.482992] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] sync_power_state_pool_size = 1000 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.483227] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] syslog_log_facility = LOG_USER {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.483408] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] tempdir = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.483575] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] timeout_nbd = 10 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.483749] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] transport_url = **** {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.483910] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] update_resources_interval = 0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.484087] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] use_cow_images = True {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.484255] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] use_eventlog = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.484416] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] use_journal = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.484576] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] use_json = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.484740] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] use_rootwrap_daemon = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.484902] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] use_stderr = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.485098] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] use_syslog = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.485279] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vcpu_pin_set = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.485454] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vif_plugging_is_fatal = True {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.485637] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vif_plugging_timeout = 300 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.485789] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] virt_mkfs = [] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.485952] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] volume_usage_poll_interval = 0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.486137] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] watch_log_file = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.486322] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] web = /usr/share/spice-html5 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 524.486525] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_concurrency.disable_process_locking = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 524.486826] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 524.487010] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 524.487193] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 524.487370] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 524.487543] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 524.487710] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 524.487893] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api.auth_strategy = keystone {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 524.488083] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api.compute_link_prefix = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 524.488284] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 524.488464] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api.dhcp_domain = novalocal {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 524.488640] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api.enable_instance_password = True {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 524.488808] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api.glance_link_prefix = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 524.488977] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 524.489201] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 524.489378] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api.instance_list_per_project_cells = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 524.489548] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api.list_records_by_skipping_down_cells = True {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 524.489712] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api.local_metadata_per_cell = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 524.489880] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api.max_limit = 1000 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 524.490059] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api.metadata_cache_expiration = 15 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 524.490241] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api.neutron_default_tenant_id = default {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 524.490410] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api.use_neutron_default_nets = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 524.490580] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 524.490745] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 524.490921] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 524.491110] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 524.491309] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api.vendordata_dynamic_targets = [] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 524.491483] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api.vendordata_jsonfile_path = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
524.491667] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.491864] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cache.backend = dogpile.cache.memcached {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.492045] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cache.backend_argument = **** {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.492226] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cache.config_prefix = cache.oslo {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.492401] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cache.dead_timeout = 60.0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.492568] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cache.debug_cache_backend = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.492732] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cache.enable_retry_client = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.492895] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cache.enable_socket_keepalive = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.493079] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cache.enabled = True {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.493254] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cache.enforce_fips_mode = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.493421] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cache.expiration_time = 600 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.493586] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cache.hashclient_retry_attempts = 2 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.493753] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cache.hashclient_retry_delay = 1.0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.493918] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cache.memcache_dead_retry = 300 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.494094] env[62740]: DEBUG oslo_service.service [None 
req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cache.memcache_password = **** {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.494264] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.494432] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.494597] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cache.memcache_pool_maxsize = 10 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.494760] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.494924] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cache.memcache_sasl_enabled = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.495124] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.495302] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cache.memcache_socket_timeout = 1.0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.495465] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cache.memcache_username = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.495630] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cache.proxies = [] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.495792] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cache.redis_password = **** {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.495965] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cache.redis_sentinel_service_name = mymaster {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.496161] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.496331] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cache.redis_server = localhost:6379 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.496497] env[62740]: DEBUG oslo_service.service [None 
req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cache.redis_socket_timeout = 1.0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.496657] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cache.redis_username = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.496820] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cache.retry_attempts = 2 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.496984] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cache.retry_delay = 0.0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.497162] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cache.socket_keepalive_count = 1 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.497328] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cache.socket_keepalive_idle = 1 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.497490] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cache.socket_keepalive_interval = 1 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.497651] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cache.tls_allowed_ciphers = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.497813] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cache.tls_cafile = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.497972] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cache.tls_certfile = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.498186] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cache.tls_enabled = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.498347] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cache.tls_keyfile = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.498528] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cinder.auth_section = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.498700] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cinder.auth_type = password {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.498860] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cinder.cafile = None {{(pid=62740) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.499063] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cinder.catalog_info = volumev3::publicURL {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.499260] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cinder.certfile = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.499441] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cinder.collect_timing = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.499611] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cinder.cross_az_attach = True {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.499776] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cinder.debug = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.499944] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cinder.endpoint_template = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.500123] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cinder.http_retries = 3 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.500297] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cinder.insecure = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.500461] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cinder.keyfile = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.500635] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cinder.os_region_name = RegionOne {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.500804] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cinder.split_loggers = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.500965] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cinder.timeout = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.501154] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.501321] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] compute.cpu_dedicated_set = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.501487] env[62740]: DEBUG 
oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] compute.cpu_shared_set = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.501685] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] compute.image_type_exclude_list = [] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.501825] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.501989] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] compute.max_concurrent_disk_ops = 0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.502170] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] compute.max_disk_devices_to_attach = -1 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.502339] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.502515] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.502682] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] compute.resource_provider_association_refresh = 300 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.502848] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] compute.shutdown_retry_interval = 10 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.503035] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.503245] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] conductor.workers = 2 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.503437] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] console.allowed_origins = [] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.503604] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] console.ssl_ciphers = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.503779] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] console.ssl_minimum_version = default {{(pid=62740) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.503952] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] consoleauth.enforce_session_timeout = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.504142] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] consoleauth.token_ttl = 600 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.504313] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cyborg.cafile = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.504475] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cyborg.certfile = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.504644] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cyborg.collect_timing = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.504808] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cyborg.connect_retries = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.504968] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cyborg.connect_retry_delay = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.505143] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cyborg.endpoint_override = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.505313] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cyborg.insecure = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.505477] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cyborg.keyfile = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.505640] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cyborg.max_version = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.505831] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cyborg.min_version = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.505986] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cyborg.region_name = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.506139] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cyborg.retriable_status_codes = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.506297] env[62740]: DEBUG 
oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cyborg.service_name = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.506471] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cyborg.service_type = accelerator {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.506636] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cyborg.split_loggers = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.506823] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cyborg.status_code_retries = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.506964] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cyborg.status_code_retry_delay = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.507138] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cyborg.timeout = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.507326] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.507489] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] cyborg.version = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.507672] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] database.backend = sqlalchemy {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.507848] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] database.connection = **** {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.508025] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] database.connection_debug = 0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.508239] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] database.connection_parameters = {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.508416] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] database.connection_recycle_time = 3600 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.508587] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] database.connection_trace = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.508754] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] 
database.db_inc_retry_interval = True {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.508920] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] database.db_max_retries = 20 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.509116] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] database.db_max_retry_interval = 10 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.509303] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] database.db_retry_interval = 1 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.509472] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] database.max_overflow = 50 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.509637] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] database.max_pool_size = 5 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.509808] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] database.max_retries = 10 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.510436] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.510436] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] database.mysql_wsrep_sync_wait = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.510436] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] database.pool_timeout = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.510523] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] database.retry_interval = 10 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.510674] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] database.slave_connection = **** {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.510842] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] database.sqlite_synchronous = True {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.511031] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] database.use_db_reconnect = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.511766] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api_database.backend = sqlalchemy {{(pid=62740) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.511766] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api_database.connection = **** {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.511766] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api_database.connection_debug = 0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.511766] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api_database.connection_parameters = {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.511983] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api_database.connection_recycle_time = 3600 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.512113] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api_database.connection_trace = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.512299] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api_database.db_inc_retry_interval = True {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.512479] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api_database.db_max_retries = 20 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.512648] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api_database.db_max_retry_interval = 10 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.512812] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api_database.db_retry_interval = 1 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.512976] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api_database.max_overflow = 50 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.513209] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api_database.max_pool_size = 5 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.513381] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api_database.max_retries = 10 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.513556] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.513717] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=62740) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.513877] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api_database.pool_timeout = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.514053] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api_database.retry_interval = 10 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.514219] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api_database.slave_connection = **** {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.514386] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] api_database.sqlite_synchronous = True {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.514566] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] devices.enabled_mdev_types = [] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.514748] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.514922] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] ephemeral_storage_encryption.default_format = luks {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.515103] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] ephemeral_storage_encryption.enabled = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.515301] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.515483] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] glance.api_servers = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.515653] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] glance.cafile = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.515818] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] glance.certfile = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.515989] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] glance.collect_timing = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.516170] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] glance.connect_retries = None {{(pid=62740) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.516333] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] glance.connect_retry_delay = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.516502] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] glance.debug = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.516669] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] glance.default_trusted_certificate_ids = [] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.516834] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] glance.enable_certificate_validation = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.516999] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] glance.enable_rbd_download = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.517176] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] glance.endpoint_override = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.517347] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] glance.insecure = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.517512] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] glance.keyfile = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.517677] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] glance.max_version = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.517837] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] glance.min_version = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.518014] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] glance.num_retries = 3 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.518226] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] glance.rbd_ceph_conf = {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.518404] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] glance.rbd_connect_timeout = 5 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.518576] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] glance.rbd_pool = {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.518747] env[62740]: DEBUG 
oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] glance.rbd_user = {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.518909] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] glance.region_name = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.519095] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] glance.retriable_status_codes = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.519284] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] glance.service_name = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.519469] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] glance.service_type = image {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.519634] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] glance.split_loggers = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.519793] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] glance.status_code_retries = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.519954] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] glance.status_code_retry_delay = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.520129] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] glance.timeout = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.520317] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.520490] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] glance.verify_glance_signatures = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.520648] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] glance.version = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.520818] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] guestfs.debug = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.520987] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] mks.enabled = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.521363] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] mks.mksproxy_base_url = 
http://127.0.0.1:6090/ {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.521563] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] image_cache.manager_interval = 2400 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.521732] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] image_cache.precache_concurrency = 1 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.521907] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] image_cache.remove_unused_base_images = True {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.522093] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.522270] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.522450] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] image_cache.subdirectory_name = _base {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.522635] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] ironic.api_max_retries = 60 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.522800] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] ironic.api_retry_interval = 2 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.522961] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] ironic.auth_section = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.523141] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] ironic.auth_type = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.523313] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] ironic.cafile = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.523544] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] ironic.certfile = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.523714] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] ironic.collect_timing = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.523883] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] ironic.conductor_group = None {{(pid=62740) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.524057] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] ironic.connect_retries = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.524225] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] ironic.connect_retry_delay = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.524388] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] ironic.endpoint_override = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.524558] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] ironic.insecure = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.524732] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] ironic.keyfile = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.524880] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] ironic.max_version = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.525050] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] ironic.min_version = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.525222] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] ironic.peer_list = [] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.525386] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] ironic.region_name = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.525546] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] ironic.retriable_status_codes = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.525711] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] ironic.serial_console_state_timeout = 10 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.525871] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] ironic.service_name = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.526056] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] ironic.service_type = baremetal {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.526224] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] ironic.shard = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.526391] env[62740]: DEBUG 
oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] ironic.split_loggers = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.526552] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] ironic.status_code_retries = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.526714] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] ironic.status_code_retry_delay = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.526873] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] ironic.timeout = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.527065] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.527229] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] ironic.version = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.527416] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.527592] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] key_manager.fixed_key = **** {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.527776] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.527939] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] barbican.barbican_api_version = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.528152] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] barbican.barbican_endpoint = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.528327] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] barbican.barbican_endpoint_type = public {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.528492] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] barbican.barbican_region_name = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.528654] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] barbican.cafile = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.528815] env[62740]: DEBUG 
oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] barbican.certfile = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.528981] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] barbican.collect_timing = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.529198] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] barbican.insecure = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.529375] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] barbican.keyfile = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.529544] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] barbican.number_of_retries = 60 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.529711] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] barbican.retry_delay = 1 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.529878] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] barbican.send_service_user_token = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.530056] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] barbican.split_loggers = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.530223] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] barbican.timeout = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.530392] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] barbican.verify_ssl = True {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.530556] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] barbican.verify_ssl_path = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.530726] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] barbican_service_user.auth_section = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.530892] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] barbican_service_user.auth_type = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.531065] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] barbican_service_user.cafile = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.531229] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] 
barbican_service_user.certfile = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.531397] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] barbican_service_user.collect_timing = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.531563] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] barbican_service_user.insecure = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.531723] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] barbican_service_user.keyfile = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.531890] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] barbican_service_user.split_loggers = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.532063] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] barbican_service_user.timeout = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.532236] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vault.approle_role_id = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.532399] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vault.approle_secret_id = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.532558] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vault.cafile = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.532717] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vault.certfile = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.532882] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vault.collect_timing = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.533058] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vault.insecure = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.533225] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vault.keyfile = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.533402] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vault.kv_mountpoint = secret {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.533565] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vault.kv_path = None {{(pid=62740) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.533729] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vault.kv_version = 2 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.533889] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vault.namespace = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.534062] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vault.root_token_id = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.534233] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vault.split_loggers = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.534394] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vault.ssl_ca_crt_file = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.534554] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vault.timeout = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.534721] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vault.use_ssl = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.534893] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.535076] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] keystone.auth_section = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.535254] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] keystone.auth_type = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.535411] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] keystone.cafile = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.535572] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] keystone.certfile = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.535737] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] keystone.collect_timing = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.535897] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] keystone.connect_retries = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
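The DEBUG records above are all emitted by oslo.config's startup dump: every option group registered with the service is walked and printed by ConfigOpts.log_opt_values(), the cfg.py:2620 call site cited in each record. A minimal, illustrative sketch of that mechanism (in the real service the [vault] and [barbican] options are registered by castellan/Nova, not by hand like this):

    import logging

    from oslo_config import cfg

    CONF = cfg.CONF
    LOG = logging.getLogger(__name__)

    # The group name becomes the "vault." prefix seen in the records above.
    CONF.register_group(cfg.OptGroup('vault'))
    CONF.register_opts([
        cfg.StrOpt('vault_url', default='http://127.0.0.1:8200'),
        cfg.IntOpt('kv_version', default=2),
        cfg.BoolOpt('use_ssl', default=False),
    ], group='vault')

    logging.basicConfig(level=logging.DEBUG)
    CONF([])  # parse an empty argv; defaults and config files apply
    # Prints one "vault.<opt> = <value>" DEBUG line per option,
    # the same shape as the records in this log.
    CONF.log_opt_values(LOG, logging.DEBUG)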
[ 524.536069] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] keystone.connect_retry_delay = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.536233] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] keystone.endpoint_override = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.536399] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] keystone.insecure = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.536558] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] keystone.keyfile = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.536717] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] keystone.max_version = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.536876] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] keystone.min_version = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.537045] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] keystone.region_name = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.537209] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] keystone.retriable_status_codes = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.537368] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] keystone.service_name = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.537538] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] keystone.service_type = identity {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.537703] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] keystone.split_loggers = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.537862] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] keystone.status_code_retries = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.538032] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] keystone.status_code_retry_delay = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.538223] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] keystone.timeout = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.538411] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] keystone.valid_interfaces = ['internal',
'public'] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.538575] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] keystone.version = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.538779] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.connection_uri = {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.538945] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.cpu_mode = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.539127] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.cpu_model_extra_flags = [] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.539303] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.cpu_models = [] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.539474] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.cpu_power_governor_high = performance {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.539644] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.cpu_power_governor_low = powersave {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.539810] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.cpu_power_management = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.539981] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.540160] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.device_detach_attempts = 8 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.540327] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.device_detach_timeout = 20 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.540494] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.disk_cachemodes = [] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.540656] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.disk_prefix = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.540823] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.enabled_perf_events = [] {{(pid=62740) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.540991] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.file_backed_memory = 0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.541170] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.gid_maps = [] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.541334] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.hw_disk_discard = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.541490] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.hw_machine_type = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.541661] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.images_rbd_ceph_conf = {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.541825] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.541991] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.542177] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.images_rbd_glance_store_name = {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.542353] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.images_rbd_pool = rbd {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.542525] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.images_type = default {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.542687] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.images_volume_group = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.542855] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.inject_key = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.543020] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.inject_partition = -2 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.543188] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.inject_password = False {{(pid=62740) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.543356] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.iscsi_iface = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.543519] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.iser_use_multipath = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.543684] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.live_migration_bandwidth = 0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.543851] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.544028] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.live_migration_downtime = 500 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.544187] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.544352] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.544510] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.live_migration_inbound_addr = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.544677] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.544843] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.live_migration_permit_post_copy = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.545015] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.live_migration_scheme = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.545198] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.live_migration_timeout_action = abort {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.545365] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.live_migration_tunnelled = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.545525] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] 
libvirt.live_migration_uri = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.545688] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.live_migration_with_native_tls = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.545847] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.max_queues = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.546028] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.546276] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.546446] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.nfs_mount_options = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.546740] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.546910] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.547089] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.num_iser_scan_tries = 5 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.547262] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.num_memory_encrypted_guests = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.547427] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.547592] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.num_pcie_ports = 0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.547757] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.num_volume_scan_tries = 5 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.547922] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.pmem_namespaces = [] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.548116] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] 
libvirt.quobyte_client_cfg = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.548408] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.548587] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.rbd_connect_timeout = 5 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.548756] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.548923] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.549113] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.rbd_secret_uuid = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.549304] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.rbd_user = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.549465] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.549639] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.remote_filesystem_transport = ssh {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.549804] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.rescue_image_id = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.549967] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.rescue_kernel_id = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.550146] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.rescue_ramdisk_id = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.550318] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.550479] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.rx_queue_size = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.550647] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] 
libvirt.smbfs_mount_options = {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.550925] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.551133] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.snapshot_compression = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.551318] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.snapshot_image_format = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.551543] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.551713] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.sparse_logical_volumes = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.551879] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.swtpm_enabled = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.552061] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.swtpm_group = tss {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.552238] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.swtpm_user = tss {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.552412] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.sysinfo_serial = unique {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.552574] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.tb_cache_size = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.552734] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.tx_queue_size = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.552935] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.uid_maps = [] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.553119] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.use_virtio_for_bridges = True {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.553340] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.virt_type = kvm 
{{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.553516] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.volume_clear = zero {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.553681] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.volume_clear_size = 0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.553850] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.volume_use_multipath = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.554016] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.vzstorage_cache_path = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.554191] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.554359] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.vzstorage_mount_group = qemu {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.554525] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.vzstorage_mount_opts = [] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.554689] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.554964] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.555157] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.vzstorage_mount_user = stack {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.555332] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.555508] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] neutron.auth_section = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.555686] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] neutron.auth_type = password {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.555849] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] neutron.cafile = 
None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.556015] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] neutron.certfile = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.556186] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] neutron.collect_timing = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.556367] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] neutron.connect_retries = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.556503] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] neutron.connect_retry_delay = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.556672] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] neutron.default_floating_pool = public {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.556830] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] neutron.endpoint_override = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.556996] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] neutron.extension_sync_interval = 600 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.557174] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] neutron.http_retries = 3 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.557340] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] neutron.insecure = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.557498] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] neutron.keyfile = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.557655] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] neutron.max_version = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.557827] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.557985] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] neutron.min_version = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.558198] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] neutron.ovs_bridge = br-int {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
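The [neutron] values above (auth_type = password, region_name = RegionOne, service_type = network, valid_interfaces = ['internal', 'public']) form a standard keystoneauth adapter group. A hedged sketch of how such a group is conventionally loaded into an authenticated client; the loading functions are real keystoneauth1 APIs, but get_neutron_adapter is an illustrative wrapper, not Nova's actual code path:

    from keystoneauth1 import loading as ks_loading
    from oslo_config import cfg

    CONF = cfg.CONF
    GROUP = 'neutron'

    # Register the standard auth/session/adapter options on the group.
    ks_loading.register_auth_conf_options(CONF, GROUP)
    ks_loading.register_session_conf_options(CONF, GROUP)
    ks_loading.register_adapter_conf_options(CONF, GROUP)

    def get_neutron_adapter():
        # auth_type = password selects the password auth plugin here.
        auth = ks_loading.load_auth_from_conf_options(CONF, GROUP)
        sess = ks_loading.load_session_from_conf_options(CONF, GROUP, auth=auth)
        # service_type, region_name and valid_interfaces drive endpoint
        # selection from the Keystone service catalog.
        return ks_loading.load_adapter_from_conf_options(
            CONF, GROUP, session=sess, auth=auth)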
[ 524.558373] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] neutron.physnets = [] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.558544] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] neutron.region_name = RegionOne {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.558705] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] neutron.retriable_status_codes = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.558892] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] neutron.service_metadata_proxy = True {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.559094] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] neutron.service_name = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.559282] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] neutron.service_type = network {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.559448] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] neutron.split_loggers = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.559607] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] neutron.status_code_retries = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.559766] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] neutron.status_code_retry_delay = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.559923] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] neutron.timeout = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.560117] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.560286] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] neutron.version = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.560468] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] notifications.bdms_in_notifications = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.560644] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] notifications.default_level = INFO {{(pid=62740) log_opt_values
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.560819] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] notifications.notification_format = unversioned {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.560984] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] notifications.notify_on_state_change = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.561173] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.561354] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] pci.alias = [] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.561525] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] pci.device_spec = [] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.561692] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] pci.report_in_placement = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.561863] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] placement.auth_section = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.562084] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] placement.auth_type = password {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.562270] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.562435] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] placement.cafile = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.562593] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] placement.certfile = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.562758] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] placement.collect_timing = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.562919] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] placement.connect_retries = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.563089] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] placement.connect_retry_delay = None {{(pid=62740) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.563285] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] placement.default_domain_id = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.563453] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] placement.default_domain_name = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.563615] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] placement.domain_id = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.563773] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] placement.domain_name = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.563933] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] placement.endpoint_override = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.564112] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] placement.insecure = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.564277] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] placement.keyfile = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.564435] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] placement.max_version = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.564591] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] placement.min_version = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.564760] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] placement.password = **** {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.564922] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] placement.project_domain_id = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.565135] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] placement.project_domain_name = Default {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.565312] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] placement.project_id = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.565487] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] placement.project_name = service {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
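placement.password above (like neutron.metadata_proxy_shared_secret earlier) is printed as ****: log_opt_values() masks any option declared with secret=True. A minimal sketch of that behaviour (illustrative only; the real [placement] options are registered through keystoneauth, not by hand like this):

    import logging

    from oslo_config import cfg

    CONF = cfg.CONF
    LOG = logging.getLogger(__name__)

    CONF.register_opts([
        cfg.StrOpt('username'),               # dumped in clear text
        cfg.StrOpt('password', secret=True),  # dumped as ****
    ], group='placement')

    logging.basicConfig(level=logging.DEBUG)
    CONF([])
    CONF.set_override('username', 'placement', group='placement')
    CONF.set_override('password', 's3kr3t', group='placement')
    CONF.log_opt_values(LOG, logging.DEBUG)  # -> placement.password = ****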
[ 524.565656] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] placement.region_name = RegionOne {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.565818] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] placement.retriable_status_codes = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.565980] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] placement.service_name = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.566174] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] placement.service_type = placement {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.566340] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] placement.split_loggers = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.566503] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] placement.status_code_retries = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.566663] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] placement.status_code_retry_delay = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.566825] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] placement.system_scope = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.566983] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] placement.timeout = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.567159] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] placement.trust_id = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.567322] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] placement.user_domain_id = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.567489] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] placement.user_domain_name = Default {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.567650] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] placement.user_id = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.567825] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] placement.username = placement {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.568037] env[62740]: DEBUG oslo_service.service [None
req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.568244] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] placement.version = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.568432] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] quota.cores = 20 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.568602] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] quota.count_usage_from_placement = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.568777] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.568949] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] quota.injected_file_content_bytes = 10240 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.569132] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] quota.injected_file_path_length = 255 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.569305] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] quota.injected_files = 5 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.569471] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] quota.instances = 10 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.569639] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] quota.key_pairs = 100 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.569805] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] quota.metadata_items = 128 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.569970] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] quota.ram = 51200 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.570151] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] quota.recheck_quota = True {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.570327] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] quota.server_group_members = 10 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.570518] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] quota.server_groups = 10 {{(pid=62740) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.570702] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.570871] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.571072] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] scheduler.image_metadata_prefilter = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.571257] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.571429] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] scheduler.max_attempts = 3 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.571596] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] scheduler.max_placement_results = 1000 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.571764] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.571928] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] scheduler.query_placement_for_image_type_support = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.572108] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.572288] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] scheduler.workers = 2 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.572463] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.572635] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.572813] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.572983] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.573163] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.573333] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.573495] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.573682] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.573853] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] filter_scheduler.host_subset_size = 1 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.574039] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.574225] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.574399] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.574567] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] filter_scheduler.isolated_hosts = [] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.574737] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] filter_scheduler.isolated_images = [] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.574911] env[62740]: DEBUG oslo_service.service [None 
req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.575088] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.575265] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.575433] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] filter_scheduler.pci_in_placement = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.575599] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.575764] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.575931] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.576110] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.576279] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.576444] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.576607] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] filter_scheduler.track_instance_changes = True {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.576783] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.576954] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] metrics.required = True {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.577155] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] metrics.weight_multiplier = 1.0 
{{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.577334] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.577502] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] metrics.weight_setting = [] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.577821] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.578008] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] serial_console.enabled = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.578229] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] serial_console.port_range = 10000:20000 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.578404] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.578576] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.578744] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] serial_console.serialproxy_port = 6083 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.578911] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] service_user.auth_section = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.579113] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] service_user.auth_type = password {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.579320] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] service_user.cafile = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.579517] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] service_user.certfile = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.579692] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] service_user.collect_timing = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.579857] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] service_user.insecure = False {{(pid=62740) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.580026] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] service_user.keyfile = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.580209] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] service_user.send_service_user_token = True {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.580380] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] service_user.split_loggers = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.580541] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] service_user.timeout = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.580715] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] spice.agent_enabled = True {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.580893] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] spice.enabled = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.581216] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.581417] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.581606] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] spice.html5proxy_port = 6082 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.581792] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] spice.image_compression = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.581986] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] spice.jpeg_compression = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.582174] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] spice.playback_compression = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.582352] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] spice.server_listen = 127.0.0.1 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.582522] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=62740) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.582684] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] spice.streaming_mode = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.582842] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] spice.zlib_compression = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.583022] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] upgrade_levels.baseapi = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.583200] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] upgrade_levels.compute = auto {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.583365] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] upgrade_levels.conductor = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.583524] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] upgrade_levels.scheduler = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.583690] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vendordata_dynamic_auth.auth_section = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.583855] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vendordata_dynamic_auth.auth_type = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.584027] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vendordata_dynamic_auth.cafile = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.584191] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vendordata_dynamic_auth.certfile = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.584361] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.584524] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vendordata_dynamic_auth.insecure = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.584705] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vendordata_dynamic_auth.keyfile = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.584886] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=62740) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.585059] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vendordata_dynamic_auth.timeout = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.585240] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vmware.api_retry_count = 10 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.585405] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vmware.ca_file = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.585577] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vmware.cache_prefix = devstack-image-cache {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.585746] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vmware.cluster_name = testcl1 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.585909] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vmware.connection_pool_size = 10 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.586106] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vmware.console_delay_seconds = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.586295] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vmware.datastore_regex = ^datastore.* {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.586503] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.586679] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vmware.host_password = **** {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.586846] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vmware.host_port = 443 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.587026] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vmware.host_username = administrator@vsphere.local {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.587204] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vmware.insecure = True {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.587373] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vmware.integration_bridge = None {{(pid=62740) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.587541] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vmware.maximum_objects = 100 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.587702] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vmware.pbm_default_policy = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.587870] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vmware.pbm_enabled = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.588037] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vmware.pbm_wsdl_location = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.588244] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.588412] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vmware.serial_port_proxy_uri = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.588573] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vmware.serial_port_service_uri = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.588740] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vmware.task_poll_interval = 0.5 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.588913] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vmware.use_linked_clone = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.589122] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vmware.vnc_keymap = en-us {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.589311] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vmware.vnc_port = 5900 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.589479] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vmware.vnc_port_total = 10000 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.589668] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vnc.auth_schemes = ['none'] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.589845] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vnc.enabled = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.590159] env[62740]: 
DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.590353] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.590527] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vnc.novncproxy_port = 6080 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.590707] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vnc.server_listen = 127.0.0.1 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.590881] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.591056] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vnc.vencrypt_ca_certs = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.591222] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vnc.vencrypt_client_cert = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.591384] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vnc.vencrypt_client_key = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.591562] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.591727] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] workarounds.disable_deep_image_inspection = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.591893] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.592069] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.592241] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.592407] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] workarounds.disable_rootwrap = False {{(pid=62740) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.592572] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] workarounds.enable_numa_live_migration = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.592734] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.592897] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.593074] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.593242] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] workarounds.libvirt_disable_apic = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.593409] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.593576] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.593761] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.593926] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.594102] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.594271] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.594431] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.594594] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
524.594756] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.594921] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.595126] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.595302] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] wsgi.client_socket_timeout = 900 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.595473] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] wsgi.default_pool_size = 1000 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.595644] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] wsgi.keep_alive = True {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.595810] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] wsgi.max_header_line = 16384 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.595976] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] wsgi.secure_proxy_ssl_header = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.596156] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] wsgi.ssl_ca_file = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.596322] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] wsgi.ssl_cert_file = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.596483] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] wsgi.ssl_key_file = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.596648] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] wsgi.tcp_keepidle = 600 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.596824] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.596990] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] zvm.ca_file = None {{(pid=62740) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.597165] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] zvm.cloud_connector_url = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.597453] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.597626] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] zvm.reachable_timeout = 300 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.597809] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_policy.enforce_new_defaults = True {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.597984] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_policy.enforce_scope = True {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.598202] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_policy.policy_default_rule = default {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.598388] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.598566] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_policy.policy_file = policy.yaml {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.598737] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.598898] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.599476] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.599476] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.599476] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.599659] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] 
oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.599743] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.599923] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] profiler.connection_string = messaging:// {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.600109] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] profiler.enabled = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.600286] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] profiler.es_doc_type = notification {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.600452] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] profiler.es_scroll_size = 10000 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.600620] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] profiler.es_scroll_time = 2m {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.600784] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] profiler.filter_error_trace = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.600956] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] profiler.hmac_keys = **** {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.601137] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] profiler.sentinel_service_name = mymaster {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.601307] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] profiler.socket_timeout = 0.1 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.601471] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] profiler.trace_requests = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.601635] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] profiler.trace_sqlalchemy = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.601815] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] profiler_jaeger.process_tags = {} {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.601978] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] 
profiler_jaeger.service_name_prefix = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.602160] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] profiler_otlp.service_name_prefix = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.602328] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] remote_debug.host = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.602488] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] remote_debug.port = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.602668] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.602835] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.602998] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.603179] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.603348] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.603512] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.603679] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.603841] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.604013] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.604195] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.604359] env[62740]: 
DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.604529] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.604696] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.604866] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.605047] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.605223] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.605389] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.605564] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.605730] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.605894] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.606071] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.606240] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.606407] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.606572] env[62740]: DEBUG oslo_service.service [None 
req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.606733] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.606894] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.607066] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.607235] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.607405] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.607573] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_rabbit.ssl = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.607746] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.607917] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.608116] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.608299] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.608473] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_rabbit.ssl_version = {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.608637] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.608825] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=62740) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.608994] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_notifications.retry = -1 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.609202] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.609382] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_messaging_notifications.transport_url = **** {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.609554] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_limit.auth_section = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.609718] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_limit.auth_type = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.609880] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_limit.cafile = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.610049] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_limit.certfile = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.610219] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_limit.collect_timing = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.610378] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_limit.connect_retries = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.610581] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_limit.connect_retry_delay = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.610748] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_limit.endpoint_id = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.610907] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_limit.endpoint_override = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.611083] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_limit.insecure = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.611248] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_limit.keyfile = None {{(pid=62740) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.611405] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_limit.max_version = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.611566] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_limit.min_version = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.611723] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_limit.region_name = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.611882] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_limit.retriable_status_codes = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.612050] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_limit.service_name = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.612211] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_limit.service_type = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.612374] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_limit.split_loggers = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.612533] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_limit.status_code_retries = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.612692] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_limit.status_code_retry_delay = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.612849] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_limit.timeout = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.613014] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_limit.valid_interfaces = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.613180] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_limit.version = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.613346] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_reports.file_event_handler = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.613508] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=62740) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.613667] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] oslo_reports.log_dir = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.613838] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.613997] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.614171] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.614340] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.614504] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.614661] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.614833] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.614993] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vif_plug_ovs_privileged.group = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.615166] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.615335] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.615497] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.615656] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] vif_plug_ovs_privileged.user = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.615827] env[62740]: DEBUG oslo_service.service 
[None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] os_vif_linux_bridge.flat_interface = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.616013] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.616195] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.616371] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.616544] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.616711] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.616891] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.617061] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.617246] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.617424] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] os_vif_ovs.isolate_vif = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.617599] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.617767] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.617943] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.618153] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] os_vif_ovs.ovsdb_interface = native {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} 
[ 524.618331] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] os_vif_ovs.per_port_bridge = False {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.618501] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] os_brick.lock_path = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.618671] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] privsep_osbrick.capabilities = [21] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.618833] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] privsep_osbrick.group = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.618993] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] privsep_osbrick.helper_command = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.619177] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.619342] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.619547] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] privsep_osbrick.user = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.619680] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.619842] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] nova_sys_admin.group = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.620008] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] nova_sys_admin.helper_command = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.620186] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.620350] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.620546] env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] nova_sys_admin.user = None {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 524.620688] 
env[62740]: DEBUG oslo_service.service [None req-46b79790-83d5-47dd-8512-a5433d305dd5 None None] ******************************************************************************** {{(pid=62740) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2624}} [ 524.621121] env[62740]: INFO nova.service [-] Starting compute node (version 0.1.0) [ 524.621966] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] Expecting reply to msg bd894e1d35034022b387660817ab2ce4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 524.630771] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bd894e1d35034022b387660817ab2ce4 [ 524.631852] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] Getting list of instances from cluster (obj){ [ 524.631852] env[62740]: value = "domain-c8" [ 524.631852] env[62740]: _type = "ClusterComputeResource" [ 524.631852] env[62740]: } {{(pid=62740) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 524.633089] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e2fc36f-e790-4ef0-beac-e1c68e976d57 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.642165] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] Got total of 0 instances {{(pid=62740) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 524.642673] env[62740]: WARNING nova.virt.vmwareapi.driver [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 524.643140] env[62740]: INFO nova.virt.node [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] Generated node identity d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 [ 524.643390] env[62740]: INFO nova.virt.node [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] Wrote node identity d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 to /opt/stack/data/n-cpu-1/compute_id [ 524.643761] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] Expecting reply to msg 802ed4801aaa41489ceb3a7529abb11c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 524.654855] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 802ed4801aaa41489ceb3a7529abb11c [ 524.655322] env[62740]: WARNING nova.compute.manager [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] Compute nodes ['d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. 
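
Annotation: the wall of `group.option = value` DEBUG records that opens this section, closed by the `****` banner just above, is oslo.config's standard startup dump, emitted through `ConfigOpts.log_opt_values` (the `cfg.py:2620` trailer on each record). A minimal sketch of the same mechanism, using a made-up `demo_privileged` group and option rather than any real Nova/os-vif option:

```python
# Hedged sketch of the startup option dump seen above, using stock
# oslo.config only; 'demo_privileged' and its option are illustrative.
import logging

from oslo_config import cfg

cfg.CONF.register_opts(
    [cfg.IntOpt('thread_pool_size', default=8)],
    group='demo_privileged',
)

logging.basicConfig(level=logging.DEBUG)
cfg.CONF([])  # parse an empty argv so no config files are required
# Logs every registered option as "group.option = value", framed by the
# same '****' banner that closes the dump above.
cfg.CONF.log_opt_values(logging.getLogger(__name__), logging.DEBUG)
```
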
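Annotation: the service also generated a node identity above and persisted it ("Wrote node identity d02ecd59-... to /opt/stack/data/n-cpu-1/compute_id") so that restarts reuse the same compute node UUID. A hedged sketch of that read-or-generate pattern, assuming a plain UUID-in-a-file layout; the helper name is made up and this is not Nova's actual code:

```python
# Hedged sketch of the persisted node-identity pattern implied by the
# "Generated/Wrote node identity" lines above. Illustrative helper only.
import os
import uuid

def get_local_node_identity(state_dir="/opt/stack/data/n-cpu-1"):
    """Return a stable per-host compute node UUID, creating it on first start."""
    path = os.path.join(state_dir, "compute_id")
    if os.path.exists(path):
        with open(path) as f:
            return uuid.UUID(f.read().strip())  # reuse the persisted identity
    node_id = uuid.uuid4()                      # first start: generate one
    os.makedirs(state_dir, exist_ok=True)
    with open(path, "w") as f:
        f.write(str(node_id))                   # e.g. d02ecd59-4cb8-...
    return node_id
```
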
[ 524.655911] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] Expecting reply to msg b304bf01a0ac4bcbbef2e778b7c3fc68 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 524.681696] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b304bf01a0ac4bcbbef2e778b7c3fc68 [ 524.682357] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] Expecting reply to msg f13732d7da1f494380dd1ffcb259605b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 524.692113] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f13732d7da1f494380dd1ffcb259605b [ 524.692538] env[62740]: INFO nova.compute.manager [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 524.692970] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] Expecting reply to msg e6a0ab371e4340d98722648e8aa30164 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 524.703369] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e6a0ab371e4340d98722648e8aa30164 [ 524.704090] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] Expecting reply to msg 9fcb908f2cc9455eb014fc38fc8703ab in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 524.714397] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9fcb908f2cc9455eb014fc38fc8703ab [ 524.715069] env[62740]: WARNING nova.compute.manager [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
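
Annotation: each "Expecting reply to msg ... in queue reply_..." / "Received RPC response for msg ..." pair above is one blocking RPC round trip: the AMQP driver tags the request with a message id and waits on the process's private reply queue. A hedged client-side sketch with stock oslo.messaging, assuming a transport_url is already configured; the topic and method names are illustrative, not the conductor calls nova-compute actually makes:

```python
# Hedged sketch of a blocking RPC round trip via oslo.messaging.
import oslo_messaging
from oslo_config import cfg

# Requires a transport_url (e.g. rabbit://...) in the loaded config.
transport = oslo_messaging.get_rpc_transport(cfg.CONF)
target = oslo_messaging.Target(topic='demo')          # illustrative topic
client = oslo_messaging.RPCClient(transport, target)

# call() tags the request with a message id, advertises this process's
# reply_<uuid> queue, and blocks until a response with that id arrives --
# the "Expecting reply" / "Received RPC response" pairing in the log.
result = client.call({}, 'ping', payload='hello')     # illustrative method
```
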
[ 524.715279] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 524.715492] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 524.715643] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 524.715793] env[62740]: DEBUG nova.compute.resource_tracker [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62740) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 524.716808] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aba2bc61-4c33-43ef-b60f-6aec5089b60e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.724543] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-541ece16-4529-4749-8911-fcf1b8025d37 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.738285] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2caaeb3-665e-464c-8dc7-8fccf2946936 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.744100] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50e8ff95-0be3-4a8d-afd5-f9542adb16bd {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.775038] env[62740]: DEBUG nova.compute.resource_tracker [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181694MB free_disk=90GB free_vcpus=48 pci_devices=None {{(pid=62740) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 524.775185] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 524.775372] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 524.775706] env[62740]: INFO 
oslo_messaging._drivers.amqpdriver [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] Expecting reply to msg 3f800d7315bf46d782dd0e5ed99ebcf2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 524.787670] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f800d7315bf46d782dd0e5ed99ebcf2 [ 524.788212] env[62740]: WARNING nova.compute.resource_tracker [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] No compute node record for cpu-1:d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 could not be found. [ 524.789249] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] Expecting reply to msg e1f2ab2a5a8142a289eab06562c43fee in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 524.800119] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e1f2ab2a5a8142a289eab06562c43fee [ 524.800902] env[62740]: INFO nova.compute.resource_tracker [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 [ 524.801304] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] Expecting reply to msg 4d380ffc112046b89d93001681526183 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 524.812526] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4d380ffc112046b89d93001681526183 [ 524.813408] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] Expecting reply to msg 798f20b8b6a44d42a1243bc78fbe817c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 524.831314] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 798f20b8b6a44d42a1243bc78fbe817c [ 524.831867] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] Expecting reply to msg db4a38bfa50848fe88ff66ac15635757 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 524.855051] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg db4a38bfa50848fe88ff66ac15635757 [ 524.855706] env[62740]: DEBUG nova.compute.resource_tracker [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 524.855870] env[62740]: DEBUG nova.compute.resource_tracker [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 524.964052] env[62740]: INFO nova.scheduler.client.report [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] [req-1de41e3b-7e1a-46d5-9e5b-99bd22cfa11a] Created resource provider record via placement API for resource provider with UUID d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
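
Annotation: the resource tracker has now turned the hypervisor view above (48 vCPUs, 196590 MB RAM) into a Placement resource provider. Placement derives schedulable capacity per resource class as (total - reserved) * allocation_ratio, which is how 48 physical vCPUs back 192 VCPU allocations at the 4.0 ratio in the inventory logged just below. A worked sketch with this run's numbers:

```python
# Worked example using the inventory this run pushes to Placement.
# Placement's capacity formula per resource class:
#   capacity = (total - reserved) * allocation_ratio
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {capacity:g} schedulable")
# -> VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```
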
[ 524.984809] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-039e18e5-c793-4a57-b621-397eeb69426d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.992728] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99991bbc-3d6f-4f6f-8b93-7acb735e7615 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.021513] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26dfc460-203a-456b-a2f6-1e275fe7e97f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.028017] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3515d0f4-99c0-4048-a42c-a1077992cf77 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.041036] env[62740]: DEBUG nova.compute.provider_tree [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] Updating inventory in ProviderTree for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 525.041566] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] Expecting reply to msg 605ec56671734d9f960f503d6749867f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 525.048560] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 605ec56671734d9f960f503d6749867f [ 525.077678] env[62740]: DEBUG nova.scheduler.client.report [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] Updated inventory for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}} [ 525.077898] env[62740]: DEBUG nova.compute.provider_tree [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] Updating resource provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 generation from 0 to 1 during operation: update_inventory {{(pid=62740) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 525.078049] env[62740]: DEBUG nova.compute.provider_tree [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] Updating inventory in ProviderTree for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 
1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 525.135430] env[62740]: DEBUG nova.compute.provider_tree [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] Updating resource provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 generation from 1 to 2 during operation: update_traits {{(pid=62740) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 525.137703] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] Expecting reply to msg 004a75c0a5ae493d9d61a002020f2f63 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 525.153220] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 004a75c0a5ae493d9d61a002020f2f63 [ 525.153863] env[62740]: DEBUG nova.compute.resource_tracker [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62740) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 525.154047] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.379s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 525.154211] env[62740]: DEBUG nova.service [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] Creating RPC server for service compute {{(pid=62740) start /opt/stack/nova/nova/service.py:182}} [ 525.164513] env[62740]: INFO oslo.messaging._drivers.impl_rabbit [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] Creating fanout queue: compute_fanout_2064caa26f3b4178a4d13f5b63093cbc [ 525.167748] env[62740]: DEBUG nova.service [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] Join ServiceGroup membership for this service compute {{(pid=62740) start /opt/stack/nova/nova/service.py:199}} [ 525.167917] env[62740]: DEBUG nova.servicegroup.drivers.db [None req-ed7f4ecd-af43-4b14-8bbb-73cada333de3 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=62740) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 530.175021] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 5cd64ddead554c8a8c855281542d421e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 530.184052] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5cd64ddead554c8a8c855281542d421e [ 534.457362] env[62740]: DEBUG dbcounter [-] [62740] Writing DB stats nova_cell0:SELECT=1 {{(pid=62740) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 534.458160] env[62740]: DEBUG dbcounter [-] [62740] Writing DB stats nova_cell1:SELECT=1 {{(pid=62740) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 557.599359] env[62740]: DEBUG oslo_concurrency.lockutils [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Acquiring lock "8c8e4056-cc51-4aaf-81f7-55625bb2b186" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62740) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 557.599718] env[62740]: DEBUG oslo_concurrency.lockutils [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Lock "8c8e4056-cc51-4aaf-81f7-55625bb2b186" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 557.600297] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Expecting reply to msg e015eab186f64f4e9ea65173e15cceb9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 557.630403] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e015eab186f64f4e9ea65173e15cceb9 [ 557.633108] env[62740]: DEBUG nova.compute.manager [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 557.636027] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Expecting reply to msg 2ee5520fd4fe484fabd73062b13429c5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 557.707787] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ee5520fd4fe484fabd73062b13429c5 [ 557.744751] env[62740]: DEBUG oslo_concurrency.lockutils [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 557.744965] env[62740]: DEBUG oslo_concurrency.lockutils [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 557.746779] env[62740]: INFO nova.compute.claims [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 557.748534] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Expecting reply to msg eab6eebfbbee46aca2e3e3b0bda0864c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 557.824747] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eab6eebfbbee46aca2e3e3b0bda0864c [ 557.828568]
env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Expecting reply to msg 6b28b3c90b504ed8824d762430531889 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 557.843143] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6b28b3c90b504ed8824d762430531889 [ 557.882176] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-633ea6d9-8c52-4acb-9bae-e081a862a346 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.891027] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a3552f0-2c4b-4761-a8a6-5d3cd917f0ac {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.924451] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f3760ba-f43c-4675-bb62-00db942df2c8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.932842] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-578aa4d7-8e3d-40d8-b294-2efa82d8d062 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.947399] env[62740]: DEBUG nova.compute.provider_tree [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 557.947399] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Expecting reply to msg a3b2fda3e2074b87b4ec28ffebf445b6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 557.958124] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a3b2fda3e2074b87b4ec28ffebf445b6 [ 557.959194] env[62740]: DEBUG nova.scheduler.client.report [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 557.961402] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Expecting reply to msg e40b4595d42645fc8ddf1379962e1509 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 557.972865] env[62740]: INFO oslo_messaging._drivers.amqpdriver 
[-] Received RPC response for msg e40b4595d42645fc8ddf1379962e1509 [ 557.973713] env[62740]: DEBUG oslo_concurrency.lockutils [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.229s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 557.974275] env[62740]: DEBUG nova.compute.manager [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] Start building networks asynchronously for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 557.975946] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Expecting reply to msg 0df95de0a415482ab90904a47a956adb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 558.015215] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0df95de0a415482ab90904a47a956adb [ 558.018456] env[62740]: DEBUG nova.compute.utils [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 558.020770] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Expecting reply to msg 7b02d64b098545c3981829ebdadd931e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 558.023530] env[62740]: DEBUG nova.compute.manager [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 558.024177] env[62740]: DEBUG nova.network.neutron [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 558.036623] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7b02d64b098545c3981829ebdadd931e [ 558.037848] env[62740]: DEBUG nova.compute.manager [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 558.040491] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Expecting reply to msg 8434d020d0ba4eef980d638a6bfc3e7a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 558.078839] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8434d020d0ba4eef980d638a6bfc3e7a [ 558.082538] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Expecting reply to msg 39d545278c1b4cecb3b6540d6a8f8a1b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 558.113302] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 39d545278c1b4cecb3b6540d6a8f8a1b [ 558.114797] env[62740]: DEBUG nova.compute.manager [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] Start spawning the instance on the hypervisor. {{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 559.789117] env[62740]: DEBUG nova.virt.hardware [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=<?>,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-09-04T08:25:18Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 559.789117] env[62740]: DEBUG nova.virt.hardware [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 559.789117] env[62740]: DEBUG nova.virt.hardware [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 559.789475] env[62740]: DEBUG nova.virt.hardware [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 559.789475] env[62740]: DEBUG nova.virt.hardware [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac
tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 559.789475] env[62740]: DEBUG nova.virt.hardware [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 559.789475] env[62740]: DEBUG nova.virt.hardware [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 559.789927] env[62740]: DEBUG nova.virt.hardware [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 559.790640] env[62740]: DEBUG nova.virt.hardware [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 559.790943] env[62740]: DEBUG nova.virt.hardware [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 559.791263] env[62740]: DEBUG nova.virt.hardware [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 559.794021] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4d90866-9326-4839-b679-dee196b68555 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.801553] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed375259-9fd7-4ac8-9561-49acc060a0f9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.818805] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dfcc1c3-cee5-41ea-9453-76b1244f4ab7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.890218] env[62740]: DEBUG nova.policy [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 
'69035e09a5ea49da89d16baac72be0cf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e49f04421e1d492181bc51feac771013', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 560.445428] env[62740]: DEBUG nova.network.neutron [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] Successfully created port: 506bbb72-1ede-43ea-bdca-ff2174eda49e {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 563.072451] env[62740]: DEBUG oslo_concurrency.lockutils [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Acquiring lock "36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.072451] env[62740]: DEBUG oslo_concurrency.lockutils [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Lock "36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 563.073224] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Expecting reply to msg 873862a8b82c424386cc4fc94871aef3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 563.100586] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 873862a8b82c424386cc4fc94871aef3 [ 563.102589] env[62740]: DEBUG nova.compute.manager [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] Starting instance...
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 563.104670] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Expecting reply to msg 7ea1f88fde2545009b2f29570da089e3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 563.157169] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ea1f88fde2545009b2f29570da089e3 [ 563.187516] env[62740]: DEBUG oslo_concurrency.lockutils [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.187763] env[62740]: DEBUG oslo_concurrency.lockutils [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 563.189381] env[62740]: INFO nova.compute.claims [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 563.191639] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Expecting reply to msg 5384801147d8482d84189bf1c1e0ba5a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 563.239242] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5384801147d8482d84189bf1c1e0ba5a [ 563.241022] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Expecting reply to msg bad0892dce6b438abb3be2d14fc87e58 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 563.259714] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bad0892dce6b438abb3be2d14fc87e58 [ 563.311275] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6c1e691-f7dc-4bba-aa46-7ee83d8dc679 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.320666] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83dd552a-2703-40ae-bf96-84d2e3a5827a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.356703] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b98d8c33-cb41-460c-9cca-7caba6cfe27e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.364600] env[62740]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a06ed577-90d2-49a8-ab30-b81e82b5c9b8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.380877] env[62740]: DEBUG nova.compute.provider_tree [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 563.381529] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Expecting reply to msg e3f7be53c05c41768df173338641ac1d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 563.393731] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e3f7be53c05c41768df173338641ac1d [ 563.394877] env[62740]: DEBUG nova.scheduler.client.report [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 563.397735] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Expecting reply to msg 0519ad1081424cc3b44ebdd534ed5c52 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 563.418684] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0519ad1081424cc3b44ebdd534ed5c52 [ 563.419572] env[62740]: DEBUG oslo_concurrency.lockutils [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.232s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 563.420197] env[62740]: DEBUG nova.compute.manager [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] Start building networks asynchronously for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 563.422478] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Expecting reply to msg 9e86f28d62ec4f6cb0606a1bb437794a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 563.458303] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9e86f28d62ec4f6cb0606a1bb437794a [ 563.459532] env[62740]: DEBUG nova.compute.utils [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 563.460466] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Expecting reply to msg 71444b6173d741ef9f8a9548828b974c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 563.462036] env[62740]: DEBUG nova.compute.manager [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 563.462140] env[62740]: DEBUG nova.network.neutron [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 563.473034] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 71444b6173d741ef9f8a9548828b974c [ 563.473584] env[62740]: DEBUG nova.compute.manager [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 563.475225] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Expecting reply to msg 96d0f3ff1ed54b81b58c7811bfc92930 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 563.513715] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 96d0f3ff1ed54b81b58c7811bfc92930 [ 563.517043] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Expecting reply to msg 2681845a653d42e29b142b15bcd69ee4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 563.552492] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2681845a653d42e29b142b15bcd69ee4 [ 563.553689] env[62740]: DEBUG nova.compute.manager [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] Start spawning the instance on the hypervisor. {{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 563.585553] env[62740]: DEBUG nova.virt.hardware [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=<?>,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-09-04T08:25:18Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 563.586164] env[62740]: DEBUG nova.virt.hardware [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 563.586470] env[62740]: DEBUG nova.virt.hardware [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 563.586789] env[62740]: DEBUG nova.virt.hardware [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 563.587124] env[62740]: DEBUG nova.virt.hardware [None
req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 563.587401] env[62740]: DEBUG nova.virt.hardware [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 563.589170] env[62740]: DEBUG nova.virt.hardware [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 563.589170] env[62740]: DEBUG nova.virt.hardware [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 563.589170] env[62740]: DEBUG nova.virt.hardware [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 563.589170] env[62740]: DEBUG nova.virt.hardware [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 563.589312] env[62740]: DEBUG nova.virt.hardware [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 563.590066] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35299df8-4420-443a-8e97-6f984b5e346e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.600703] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d94eeaf1-a7b3-4052-b7a9-b333579122e7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.836326] env[62740]: DEBUG nova.policy [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b0822ee929114d7796d4d17145960929', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6c7fa904a1f0444095737f2528982c72', 'project_domain_id': 'default', 
'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 565.192339] env[62740]: DEBUG nova.network.neutron [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] Successfully created port: 8cadbe5e-e759-4116-a9e0-efba0a595343 {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 566.220331] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Acquiring lock "6b804847-a51d-4e01-90a9-bd3f8116f95f" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.220331] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Lock "6b804847-a51d-4e01-90a9-bd3f8116f95f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.220603] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Expecting reply to msg 8dbe79b95c1c43c589494c9a787dd702 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 566.232177] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8dbe79b95c1c43c589494c9a787dd702 [ 566.233027] env[62740]: DEBUG nova.compute.manager [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] Starting instance... 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 566.234374] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Expecting reply to msg a66f8fdc334c4d8ba56c366a2529ef01 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 566.290022] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a66f8fdc334c4d8ba56c366a2529ef01 [ 566.320090] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.321652] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.322716] env[62740]: INFO nova.compute.claims [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 566.325294] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Expecting reply to msg 1d6cf2c18f814cff916e122da2147ee2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 566.374163] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1d6cf2c18f814cff916e122da2147ee2 [ 566.377130] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Expecting reply to msg 0d3d648eb1b146168e6a3c7ddc1cd286 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 566.392065] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0d3d648eb1b146168e6a3c7ddc1cd286 [ 566.470675] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ba08e43-22ae-4d1c-9ea0-8a3285924a00 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.479021] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c00d11f9-6358-4aec-a36d-1a3b9cdeaff7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.515177] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de961fad-44c2-4bb2-965e-93f7cf0a635f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.523251] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dda1a98f-423f-4ed1-8baa-2e5cb689043c 
{{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.537950] env[62740]: DEBUG nova.compute.provider_tree [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 566.538536] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Expecting reply to msg cb883cd3ca4c4480b9b795f9d6c85869 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 566.547194] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cb883cd3ca4c4480b9b795f9d6c85869 [ 566.548212] env[62740]: DEBUG nova.scheduler.client.report [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 566.550786] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Expecting reply to msg 2aa05135d4714fac8eb1b43f83717e2f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 566.573280] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2aa05135d4714fac8eb1b43f83717e2f [ 566.574473] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.254s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 566.575171] env[62740]: DEBUG nova.compute.manager [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] Start building networks asynchronously for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 566.577291] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Expecting reply to msg dcd5fac43c5144ec91e0f7ff45a77ec2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 566.624164] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dcd5fac43c5144ec91e0f7ff45a77ec2 [ 566.625928] env[62740]: DEBUG nova.compute.utils [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 566.626214] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Expecting reply to msg 7e7eaaea4ddc425f9ac006c68b559fa7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 566.627181] env[62740]: DEBUG nova.compute.manager [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 566.627351] env[62740]: DEBUG nova.network.neutron [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 566.644252] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7e7eaaea4ddc425f9ac006c68b559fa7 [ 566.644834] env[62740]: DEBUG nova.compute.manager [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 566.648219] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Expecting reply to msg 5088e71174234a5b90ee41adc6ab51c4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 566.694061] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5088e71174234a5b90ee41adc6ab51c4 [ 566.697684] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Expecting reply to msg f899d88cbcb8483b8ebb499671ebb468 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 566.735796] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f899d88cbcb8483b8ebb499671ebb468 [ 566.738731] env[62740]: DEBUG nova.compute.manager [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] Start spawning the instance on the hypervisor. {{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 566.777457] env[62740]: DEBUG nova.virt.hardware [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 566.777716] env[62740]: DEBUG nova.virt.hardware [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 566.777881] env[62740]: DEBUG nova.virt.hardware [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 566.778501] env[62740]: DEBUG nova.virt.hardware [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 566.778501] env[62740]: DEBUG nova.virt.hardware [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Image pref 0:0:0 {{(pid=62740) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 566.778501] env[62740]: DEBUG nova.virt.hardware [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 566.778652] env[62740]: DEBUG nova.virt.hardware [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 566.779346] env[62740]: DEBUG nova.virt.hardware [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 566.780041] env[62740]: DEBUG nova.virt.hardware [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 566.780041] env[62740]: DEBUG nova.virt.hardware [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 566.780041] env[62740]: DEBUG nova.virt.hardware [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 566.781175] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-396d1037-1c5e-48ba-8d51-786f07053850 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.789588] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0420c57b-dee6-4e9d-a80f-770e81266b2a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.921625] env[62740]: DEBUG nova.policy [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd45b168143584471936a982fbe35e4b6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '913a1345242c4a21a82f565caea73ae2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 567.819828] env[62740]: ERROR nova.compute.manager [None 
req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 506bbb72-1ede-43ea-bdca-ff2174eda49e, please check neutron logs for more information. [ 567.819828] env[62740]: ERROR nova.compute.manager Traceback (most recent call last): [ 567.819828] env[62740]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 567.819828] env[62740]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 567.819828] env[62740]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 567.819828] env[62740]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 567.819828] env[62740]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 567.819828] env[62740]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 567.819828] env[62740]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 567.819828] env[62740]: ERROR nova.compute.manager self.force_reraise() [ 567.819828] env[62740]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 567.819828] env[62740]: ERROR nova.compute.manager raise self.value [ 567.819828] env[62740]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 567.819828] env[62740]: ERROR nova.compute.manager updated_port = self._update_port( [ 567.819828] env[62740]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 567.819828] env[62740]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 567.820440] env[62740]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 567.820440] env[62740]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 567.820440] env[62740]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 506bbb72-1ede-43ea-bdca-ff2174eda49e, please check neutron logs for more information. 
[ 567.820440] env[62740]: ERROR nova.compute.manager [ 567.824020] env[62740]: Traceback (most recent call last): [ 567.824020] env[62740]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 567.824020] env[62740]: listener.cb(fileno) [ 567.824020] env[62740]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 567.824020] env[62740]: result = function(*args, **kwargs) [ 567.824020] env[62740]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 567.824020] env[62740]: return func(*args, **kwargs) [ 567.824020] env[62740]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 567.824020] env[62740]: raise e [ 567.824020] env[62740]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 567.824020] env[62740]: nwinfo = self.network_api.allocate_for_instance( [ 567.824020] env[62740]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 567.824020] env[62740]: created_port_ids = self._update_ports_for_instance( [ 567.824020] env[62740]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 567.824020] env[62740]: with excutils.save_and_reraise_exception(): [ 567.824020] env[62740]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 567.824020] env[62740]: self.force_reraise() [ 567.824020] env[62740]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 567.824020] env[62740]: raise self.value [ 567.824020] env[62740]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 567.824020] env[62740]: updated_port = self._update_port( [ 567.824020] env[62740]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 567.824020] env[62740]: _ensure_no_port_binding_failure(port) [ 567.824020] env[62740]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 567.824020] env[62740]: raise exception.PortBindingFailed(port_id=port['id']) [ 567.824020] env[62740]: nova.exception.PortBindingFailed: Binding failed for port 506bbb72-1ede-43ea-bdca-ff2174eda49e, please check neutron logs for more information. [ 567.824020] env[62740]: Removing descriptor: 16 [ 567.825165] env[62740]: ERROR nova.compute.manager [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 506bbb72-1ede-43ea-bdca-ff2174eda49e, please check neutron logs for more information. 
[ 567.825165] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] Traceback (most recent call last): [ 567.825165] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 567.825165] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] yield resources [ 567.825165] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 567.825165] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] self.driver.spawn(context, instance, image_meta, [ 567.825165] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 567.825165] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] self._vmops.spawn(context, instance, image_meta, injected_files, [ 567.825165] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 567.825165] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] vm_ref = self.build_virtual_machine(instance, [ 567.825165] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 567.825426] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] vif_infos = vmwarevif.get_vif_info(self._session, [ 567.825426] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 567.825426] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] for vif in network_info: [ 567.825426] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 567.825426] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] return self._sync_wrapper(fn, *args, **kwargs) [ 567.825426] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 567.825426] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] self.wait() [ 567.825426] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 567.825426] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] self[:] = self._gt.wait() [ 567.825426] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 567.825426] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] return self._exit_event.wait() [ 567.825426] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 567.825426] env[62740]: ERROR 
nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] result = hub.switch() [ 567.825737] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 567.825737] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] return self.greenlet.switch() [ 567.825737] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 567.825737] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] result = function(*args, **kwargs) [ 567.825737] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 567.825737] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] return func(*args, **kwargs) [ 567.825737] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 567.825737] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] raise e [ 567.825737] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 567.825737] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] nwinfo = self.network_api.allocate_for_instance( [ 567.825737] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 567.825737] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] created_port_ids = self._update_ports_for_instance( [ 567.825737] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 567.826439] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] with excutils.save_and_reraise_exception(): [ 567.826439] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 567.826439] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] self.force_reraise() [ 567.826439] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 567.826439] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] raise self.value [ 567.826439] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 567.826439] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] updated_port = self._update_port( [ 567.826439] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 567.826439] 
env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] _ensure_no_port_binding_failure(port) [ 567.826439] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 567.826439] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] raise exception.PortBindingFailed(port_id=port['id']) [ 567.826439] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] nova.exception.PortBindingFailed: Binding failed for port 506bbb72-1ede-43ea-bdca-ff2174eda49e, please check neutron logs for more information. [ 567.826439] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] [ 567.826780] env[62740]: INFO nova.compute.manager [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] Terminating instance [ 567.826861] env[62740]: DEBUG oslo_concurrency.lockutils [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Acquiring lock "refresh_cache-8c8e4056-cc51-4aaf-81f7-55625bb2b186" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 567.827296] env[62740]: DEBUG oslo_concurrency.lockutils [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Acquired lock "refresh_cache-8c8e4056-cc51-4aaf-81f7-55625bb2b186" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 567.827785] env[62740]: DEBUG nova.network.neutron [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 567.828328] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Expecting reply to msg edfe9a4a12d141719be14424f5bb7148 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 567.838782] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg edfe9a4a12d141719be14424f5bb7148 [ 567.906770] env[62740]: DEBUG nova.network.neutron [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 568.153364] env[62740]: DEBUG nova.network.neutron [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 568.153884] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Expecting reply to msg f580434007c9438a93f7bd63204ff709 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 568.168498] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f580434007c9438a93f7bd63204ff709 [ 568.169238] env[62740]: DEBUG oslo_concurrency.lockutils [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Releasing lock "refresh_cache-8c8e4056-cc51-4aaf-81f7-55625bb2b186" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 568.169631] env[62740]: DEBUG nova.compute.manager [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 568.169817] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 568.170390] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-418ede3b-4df3-4920-9763-500d752aa4f8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.185070] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21334e06-aac8-421f-84cd-2d04015ab56a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.207072] env[62740]: DEBUG nova.compute.manager [req-d8e5ba27-630a-448e-bb41-a3c58a1171a4 req-9c897c12-7657-4695-bcc6-0568f02270b5 service nova] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] Received event network-changed-506bbb72-1ede-43ea-bdca-ff2174eda49e {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 568.207296] env[62740]: DEBUG nova.compute.manager [req-d8e5ba27-630a-448e-bb41-a3c58a1171a4 req-9c897c12-7657-4695-bcc6-0568f02270b5 service nova] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] Refreshing instance network info cache due to event network-changed-506bbb72-1ede-43ea-bdca-ff2174eda49e. 
{{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 568.207502] env[62740]: DEBUG oslo_concurrency.lockutils [req-d8e5ba27-630a-448e-bb41-a3c58a1171a4 req-9c897c12-7657-4695-bcc6-0568f02270b5 service nova] Acquiring lock "refresh_cache-8c8e4056-cc51-4aaf-81f7-55625bb2b186" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 568.207637] env[62740]: DEBUG oslo_concurrency.lockutils [req-d8e5ba27-630a-448e-bb41-a3c58a1171a4 req-9c897c12-7657-4695-bcc6-0568f02270b5 service nova] Acquired lock "refresh_cache-8c8e4056-cc51-4aaf-81f7-55625bb2b186" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 568.207792] env[62740]: DEBUG nova.network.neutron [req-d8e5ba27-630a-448e-bb41-a3c58a1171a4 req-9c897c12-7657-4695-bcc6-0568f02270b5 service nova] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] Refreshing network info cache for port 506bbb72-1ede-43ea-bdca-ff2174eda49e {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 568.208307] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-d8e5ba27-630a-448e-bb41-a3c58a1171a4 req-9c897c12-7657-4695-bcc6-0568f02270b5 service nova] Expecting reply to msg da504606a43d4699875e645ecb81d009 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 568.215672] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8c8e4056-cc51-4aaf-81f7-55625bb2b186 could not be found. [ 568.216152] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 568.216599] env[62740]: INFO nova.compute.manager [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] Took 0.05 seconds to destroy the instance on the hypervisor. [ 568.216875] env[62740]: DEBUG oslo.service.loopingcall [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 568.217540] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg da504606a43d4699875e645ecb81d009 [ 568.218132] env[62740]: DEBUG nova.compute.manager [-] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 568.218262] env[62740]: DEBUG nova.network.neutron [-] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 568.264062] env[62740]: DEBUG nova.network.neutron [req-d8e5ba27-630a-448e-bb41-a3c58a1171a4 req-9c897c12-7657-4695-bcc6-0568f02270b5 service nova] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 568.295537] env[62740]: DEBUG nova.network.neutron [-] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 568.296069] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg d12a1b2e4423404abcd0826707741a2e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 568.310078] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d12a1b2e4423404abcd0826707741a2e [ 568.310376] env[62740]: DEBUG nova.network.neutron [-] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 568.310702] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 7a2e1bea67804aa6b9f46abcd57b5a13 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 568.324672] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7a2e1bea67804aa6b9f46abcd57b5a13 [ 568.324672] env[62740]: INFO nova.compute.manager [-] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] Took 0.10 seconds to deallocate network for instance. 
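The PortBindingFailed tracebacks in this log all funnel through the same check in nova/network/neutron.py: after Nova updates a port in Neutron, it inspects the binding:vif_type value Neutron reports back, and the sentinel 'binding_failed' aborts the build. A minimal, self-contained Python sketch of that check follows; the names and the hand-built port dict are simplified for illustration, not the actual Nova implementation:

    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__('Binding failed for port %s, please check '
                             'neutron logs for more information.' % port_id)

    def ensure_no_port_binding_failure(port):
        # Neutron reports the outcome of port binding in binding:vif_type;
        # 'binding_failed' means no mechanism driver could bind the port.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port['id'])

    # A failed port roughly as it might come back from Neutron (fields trimmed):
    port = {'id': '506bbb72-1ede-43ea-bdca-ff2174eda49e',
            'binding:vif_type': 'binding_failed'}
    try:
        ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)  # prints the same message seen in the tracebacks above

Raising here, rather than continuing with a half-bound port, is what lets the compute manager abort the resource claim and tear the instance down, as the surrounding records show.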
[ 568.325691] env[62740]: DEBUG nova.compute.claims [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 568.326311] env[62740]: DEBUG oslo_concurrency.lockutils [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 568.326739] env[62740]: DEBUG oslo_concurrency.lockutils [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.001s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 568.328992] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Expecting reply to msg 027a0d6a9bd7491e85428b4f1c26e2e3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 568.387686] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 027a0d6a9bd7491e85428b4f1c26e2e3 [ 568.468712] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f3a8ac1-3059-4f2e-80d1-05e68c2cc3fd {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.479378] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26de587f-145e-4e51-947a-efc12f06c245 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.516458] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fecd415-b161-4d74-955d-46203f474ac1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.524428] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54c5cdfc-9a5a-4195-81cb-9bf1dbf7e614 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.540269] env[62740]: DEBUG nova.compute.provider_tree [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 568.540608] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Expecting reply to msg 8794bf8388b24a33bd3e7b2a18a13a60 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 568.556189] env[62740]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8794bf8388b24a33bd3e7b2a18a13a60 [ 568.556189] env[62740]: DEBUG nova.scheduler.client.report [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 568.559174] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Expecting reply to msg e521ebbeb8d34c079a512d9557693c4e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 568.591896] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e521ebbeb8d34c079a512d9557693c4e [ 568.592902] env[62740]: DEBUG oslo_concurrency.lockutils [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.266s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 568.593683] env[62740]: ERROR nova.compute.manager [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 506bbb72-1ede-43ea-bdca-ff2174eda49e, please check neutron logs for more information. 
[ 568.593683] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] Traceback (most recent call last): [ 568.593683] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 568.593683] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] self.driver.spawn(context, instance, image_meta, [ 568.593683] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 568.593683] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] self._vmops.spawn(context, instance, image_meta, injected_files, [ 568.593683] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 568.593683] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] vm_ref = self.build_virtual_machine(instance, [ 568.593683] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 568.593683] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] vif_infos = vmwarevif.get_vif_info(self._session, [ 568.593683] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 568.594010] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] for vif in network_info: [ 568.594010] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 568.594010] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] return self._sync_wrapper(fn, *args, **kwargs) [ 568.594010] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 568.594010] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] self.wait() [ 568.594010] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 568.594010] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] self[:] = self._gt.wait() [ 568.594010] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 568.594010] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] return self._exit_event.wait() [ 568.594010] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 568.594010] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] result = hub.switch() [ 568.594010] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
568.594010] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] return self.greenlet.switch() [ 568.594337] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 568.594337] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] result = function(*args, **kwargs) [ 568.594337] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 568.594337] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] return func(*args, **kwargs) [ 568.594337] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 568.594337] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] raise e [ 568.594337] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 568.594337] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] nwinfo = self.network_api.allocate_for_instance( [ 568.594337] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 568.594337] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] created_port_ids = self._update_ports_for_instance( [ 568.594337] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 568.594337] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] with excutils.save_and_reraise_exception(): [ 568.594337] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 568.594663] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] self.force_reraise() [ 568.594663] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 568.594663] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] raise self.value [ 568.594663] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 568.594663] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] updated_port = self._update_port( [ 568.594663] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 568.594663] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] _ensure_no_port_binding_failure(port) [ 568.594663] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 568.594663] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] raise exception.PortBindingFailed(port_id=port['id']) [ 568.594663] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] nova.exception.PortBindingFailed: Binding failed for port 506bbb72-1ede-43ea-bdca-ff2174eda49e, please check neutron logs for more information. [ 568.594663] env[62740]: ERROR nova.compute.manager [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] [ 568.594998] env[62740]: DEBUG nova.compute.utils [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] Binding failed for port 506bbb72-1ede-43ea-bdca-ff2174eda49e, please check neutron logs for more information. {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 568.602994] env[62740]: DEBUG nova.compute.manager [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] Build of instance 8c8e4056-cc51-4aaf-81f7-55625bb2b186 was re-scheduled: Binding failed for port 506bbb72-1ede-43ea-bdca-ff2174eda49e, please check neutron logs for more information. {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 568.603433] env[62740]: DEBUG nova.compute.manager [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 568.604181] env[62740]: DEBUG oslo_concurrency.lockutils [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Acquiring lock "refresh_cache-8c8e4056-cc51-4aaf-81f7-55625bb2b186" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 568.895756] env[62740]: DEBUG nova.network.neutron [req-d8e5ba27-630a-448e-bb41-a3c58a1171a4 req-9c897c12-7657-4695-bcc6-0568f02270b5 service nova] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 568.899879] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-d8e5ba27-630a-448e-bb41-a3c58a1171a4 req-9c897c12-7657-4695-bcc6-0568f02270b5 service nova] Expecting reply to msg 97e755248a824cd78c1ad31d80205bf5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 568.915927] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 97e755248a824cd78c1ad31d80205bf5 [ 568.915927] env[62740]: DEBUG oslo_concurrency.lockutils [req-d8e5ba27-630a-448e-bb41-a3c58a1171a4 req-9c897c12-7657-4695-bcc6-0568f02270b5 service nova] Releasing lock "refresh_cache-8c8e4056-cc51-4aaf-81f7-55625bb2b186" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 568.915927] env[62740]: DEBUG oslo_concurrency.lockutils [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 
tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Acquired lock "refresh_cache-8c8e4056-cc51-4aaf-81f7-55625bb2b186" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 568.915927] env[62740]: DEBUG nova.network.neutron [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 568.915927] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Expecting reply to msg 6850bd58da744712b25886c57bba75cc in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 568.928370] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6850bd58da744712b25886c57bba75cc [ 569.044158] env[62740]: DEBUG nova.network.neutron [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 569.449653] env[62740]: DEBUG nova.network.neutron [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 569.450140] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Expecting reply to msg 5e36bce97c4f41bbbc8db7c5fef05df8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 569.463152] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e36bce97c4f41bbbc8db7c5fef05df8 [ 569.463743] env[62740]: DEBUG oslo_concurrency.lockutils [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Releasing lock "refresh_cache-8c8e4056-cc51-4aaf-81f7-55625bb2b186" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 569.463957] env[62740]: DEBUG nova.compute.manager [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 569.464139] env[62740]: DEBUG nova.compute.manager [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 569.464307] env[62740]: DEBUG nova.network.neutron [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 569.546397] env[62740]: DEBUG nova.network.neutron [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 569.547333] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Expecting reply to msg 03e6df2fc4d24aa8bbee084420b30eea in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 569.556957] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 03e6df2fc4d24aa8bbee084420b30eea [ 569.557556] env[62740]: DEBUG nova.network.neutron [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 569.558074] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Expecting reply to msg c8a24d73a0ce403aa05e29f3b62356a1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 569.572647] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c8a24d73a0ce403aa05e29f3b62356a1 [ 569.573747] env[62740]: INFO nova.compute.manager [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] [instance: 8c8e4056-cc51-4aaf-81f7-55625bb2b186] Took 0.11 seconds to deallocate network for instance. 
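Note: every PortBindingFailed in this section is raised from the frame at /opt/stack/nova/nova/network/neutron.py line 294 shown in the tracebacks above. The check itself is tiny; the sketch below reconstructs it from those frames, with the 'binding_failed' sentinel value and a stand-in exception class assumed so the snippet runs on its own (it mirrors, not reproduces, Nova's code):

```python
# Reconstructed from the traceback frames above (nova/network/neutron.py:294).
# The sentinel value and the exception class are assumptions made so the
# snippet is self-contained.
VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed binding:vif_type sentinel


class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed."""

    def __init__(self, port_id):
        super().__init__(f"Binding failed for port {port_id}, please check "
                         "neutron logs for more information.")


def _ensure_no_port_binding_failure(port):
    # Neutron reports a failed bind via the port's binding:vif_type field;
    # Nova refuses to build VIF info for such a port and aborts the spawn.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


try:
    _ensure_no_port_binding_failure(
        {'id': '506bbb72-1ede-43ea-bdca-ff2174eda49e',
         'binding:vif_type': 'binding_failed'})
except PortBindingFailed as exc:
    print(exc)  # matches the message seen throughout this log
```

Neutron marks a port this way when no mechanism driver could bind it on the target host, which is why every occurrence of the error points at the neutron logs.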
[ 569.576168] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Expecting reply to msg 253908392966483a97f43d312dec3a06 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 569.624186] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 253908392966483a97f43d312dec3a06 [ 569.627649] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Expecting reply to msg bced1a04e68a46c582c80e554536823d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 569.678510] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bced1a04e68a46c582c80e554536823d [ 569.709853] env[62740]: INFO nova.scheduler.client.report [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Deleted allocations for instance 8c8e4056-cc51-4aaf-81f7-55625bb2b186 [ 569.720715] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Expecting reply to msg 55332a43956248f4aa7a1fd552df7a60 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 569.740604] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 55332a43956248f4aa7a1fd552df7a60 [ 569.741923] env[62740]: DEBUG oslo_concurrency.lockutils [None req-0c3a48a1-683a-4e1b-b088-e38b124325ac tempest-ServersAdminNegativeTestJSON-2127400692 tempest-ServersAdminNegativeTestJSON-2127400692-project-member] Lock "8c8e4056-cc51-4aaf-81f7-55625bb2b186" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 12.142s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 570.197598] env[62740]: DEBUG nova.network.neutron [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] Successfully created port: 9b507713-e727-4c28-8059-3547a8442e1e {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 573.367981] env[62740]: ERROR nova.compute.manager [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 8cadbe5e-e759-4116-a9e0-efba0a595343, please check neutron logs for more information. 
[ 573.367981] env[62740]: ERROR nova.compute.manager Traceback (most recent call last): [ 573.367981] env[62740]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 573.367981] env[62740]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 573.367981] env[62740]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 573.367981] env[62740]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 573.367981] env[62740]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 573.367981] env[62740]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 573.367981] env[62740]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 573.367981] env[62740]: ERROR nova.compute.manager self.force_reraise() [ 573.367981] env[62740]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 573.367981] env[62740]: ERROR nova.compute.manager raise self.value [ 573.367981] env[62740]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 573.367981] env[62740]: ERROR nova.compute.manager updated_port = self._update_port( [ 573.367981] env[62740]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 573.367981] env[62740]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 573.368893] env[62740]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 573.368893] env[62740]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 573.368893] env[62740]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 8cadbe5e-e759-4116-a9e0-efba0a595343, please check neutron logs for more information. 
[ 573.368893] env[62740]: ERROR nova.compute.manager [ 573.368893] env[62740]: Traceback (most recent call last): [ 573.368893] env[62740]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 573.368893] env[62740]: listener.cb(fileno) [ 573.368893] env[62740]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 573.368893] env[62740]: result = function(*args, **kwargs) [ 573.368893] env[62740]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 573.368893] env[62740]: return func(*args, **kwargs) [ 573.368893] env[62740]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 573.368893] env[62740]: raise e [ 573.368893] env[62740]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 573.368893] env[62740]: nwinfo = self.network_api.allocate_for_instance( [ 573.368893] env[62740]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 573.368893] env[62740]: created_port_ids = self._update_ports_for_instance( [ 573.368893] env[62740]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 573.368893] env[62740]: with excutils.save_and_reraise_exception(): [ 573.368893] env[62740]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 573.368893] env[62740]: self.force_reraise() [ 573.368893] env[62740]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 573.368893] env[62740]: raise self.value [ 573.368893] env[62740]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 573.368893] env[62740]: updated_port = self._update_port( [ 573.368893] env[62740]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 573.368893] env[62740]: _ensure_no_port_binding_failure(port) [ 573.368893] env[62740]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 573.368893] env[62740]: raise exception.PortBindingFailed(port_id=port['id']) [ 573.369601] env[62740]: nova.exception.PortBindingFailed: Binding failed for port 8cadbe5e-e759-4116-a9e0-efba0a595343, please check neutron logs for more information. [ 573.369601] env[62740]: Removing descriptor: 17 [ 573.369601] env[62740]: ERROR nova.compute.manager [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 8cadbe5e-e759-4116-a9e0-efba0a595343, please check neutron logs for more information. 
[ 573.369601] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] Traceback (most recent call last): [ 573.369601] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 573.369601] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] yield resources [ 573.369601] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 573.369601] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] self.driver.spawn(context, instance, image_meta, [ 573.369601] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 573.369601] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 573.369601] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 573.369601] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] vm_ref = self.build_virtual_machine(instance, [ 573.369934] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 573.369934] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] vif_infos = vmwarevif.get_vif_info(self._session, [ 573.369934] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 573.369934] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] for vif in network_info: [ 573.369934] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 573.369934] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] return self._sync_wrapper(fn, *args, **kwargs) [ 573.369934] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 573.369934] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] self.wait() [ 573.369934] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 573.369934] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] self[:] = self._gt.wait() [ 573.369934] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 573.369934] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] return self._exit_event.wait() [ 573.369934] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 573.370286] env[62740]: ERROR 
nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] result = hub.switch() [ 573.370286] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 573.370286] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] return self.greenlet.switch() [ 573.370286] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 573.370286] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] result = function(*args, **kwargs) [ 573.370286] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 573.370286] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] return func(*args, **kwargs) [ 573.370286] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 573.370286] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] raise e [ 573.370286] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 573.370286] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] nwinfo = self.network_api.allocate_for_instance( [ 573.370286] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 573.370286] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] created_port_ids = self._update_ports_for_instance( [ 573.370661] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 573.370661] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] with excutils.save_and_reraise_exception(): [ 573.370661] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 573.370661] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] self.force_reraise() [ 573.370661] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 573.370661] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] raise self.value [ 573.370661] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 573.370661] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] updated_port = self._update_port( [ 573.370661] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 573.370661] 
env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] _ensure_no_port_binding_failure(port) [ 573.370661] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 573.370661] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] raise exception.PortBindingFailed(port_id=port['id']) [ 573.370972] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] nova.exception.PortBindingFailed: Binding failed for port 8cadbe5e-e759-4116-a9e0-efba0a595343, please check neutron logs for more information. [ 573.370972] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] [ 573.370972] env[62740]: INFO nova.compute.manager [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] Terminating instance [ 573.373024] env[62740]: DEBUG oslo_concurrency.lockutils [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Acquiring lock "refresh_cache-36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 573.373113] env[62740]: DEBUG oslo_concurrency.lockutils [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Acquired lock "refresh_cache-36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 573.373306] env[62740]: DEBUG nova.network.neutron [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 573.373748] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Expecting reply to msg b4a2d573e1cd493fa613cf0f032f3eec in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 573.385789] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4a2d573e1cd493fa613cf0f032f3eec [ 573.495397] env[62740]: DEBUG nova.network.neutron [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 574.163791] env[62740]: DEBUG nova.network.neutron [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 574.164428] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Expecting reply to msg 8bc522cb4e13475085015a453b70b4bc in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 574.177081] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8bc522cb4e13475085015a453b70b4bc [ 574.177081] env[62740]: DEBUG oslo_concurrency.lockutils [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Releasing lock "refresh_cache-36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 574.177081] env[62740]: DEBUG nova.compute.manager [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 574.177081] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 574.178068] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-96f4cf55-c4e6-44be-8a66-87fdf4e9d716 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.191633] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcdeca22-984f-46ae-81f8-f031c11ac10c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.224256] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e could not be found. 
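Note: the WARNING at 574.224256 above is the expected path for this teardown. The spawn failed during network allocation, so no backing VM was ever registered in vCenter and the UUID lookup comes back empty; the driver treats that as already-destroyed and carries on with cleanup. A minimal, illustrative sketch of that tolerate-missing pattern (assumed shapes, not the real vmops signatures):

```python
# Illustrative tolerate-missing teardown, mirroring the WARNING above: spawn
# failed before any backing VM existed, so destroy treats "not found" as done.
# destroy() and lookup() are hypothetical stand-ins.
class InstanceNotFound(Exception):
    def __init__(self, uuid):
        super().__init__(f"Instance {uuid} could not be found.")


def destroy(uuid, find_vm_by_uuid):
    try:
        vm_ref = find_vm_by_uuid(uuid)  # SearchIndex.FindAllByUuid in the log
    except InstanceNotFound as exc:
        print(f"Instance does not exist on backend: {exc}")
        return  # nothing to tear down on the hypervisor
    # ... power off and unregister vm_ref here ...


def lookup(uuid):
    raise InstanceNotFound(uuid)  # vCenter has no VM for this UUID


destroy('36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e', lookup)
```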
[ 574.228361] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 574.228361] env[62740]: INFO nova.compute.manager [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] Took 0.05 seconds to destroy the instance on the hypervisor. [ 574.228361] env[62740]: DEBUG oslo.service.loopingcall [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 574.230617] env[62740]: DEBUG nova.compute.manager [-] [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 574.230889] env[62740]: DEBUG nova.network.neutron [-] [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 574.326879] env[62740]: DEBUG nova.network.neutron [-] [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 574.326879] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg f0a278328e3645008e81990c9e541e27 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 574.339265] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f0a278328e3645008e81990c9e541e27 [ 574.339265] env[62740]: DEBUG nova.network.neutron [-] [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 574.339265] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg fc7b7635943d4fb098deca46560c8d2f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 574.351086] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fc7b7635943d4fb098deca46560c8d2f [ 574.351804] env[62740]: INFO nova.compute.manager [-] [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] Took 0.12 seconds to deallocate network for instance. 
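Note: each traceback in this section passes through oslo_utils.excutils.save_and_reraise_exception() (see the excutils.py frames above). The context manager lets cleanup run and then re-raises the original exception unchanged, which is why PortBindingFailed reaches the compute manager intact. A small usage sketch, assuming oslo.utils is installed; update_port and cleanup_ports are hypothetical stand-ins for the helpers named in the traceback:

```python
# Usage sketch of the oslo.utils helper visible in every traceback above.
from oslo_utils import excutils


def update_ports(requested_ports, update_port, cleanup_ports):
    created = []
    for port in requested_ports:
        try:
            created.append(update_port(port))
        except Exception:
            # The body runs first (roll back the ports created so far), then
            # the original exception is re-raised unchanged on exit -- which
            # is why PortBindingFailed surfaces unmodified in this log.
            with excutils.save_and_reraise_exception():
                cleanup_ports(created)
    return created
```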
[ 574.355437] env[62740]: DEBUG nova.compute.claims [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 574.355550] env[62740]: DEBUG oslo_concurrency.lockutils [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.355766] env[62740]: DEBUG oslo_concurrency.lockutils [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.357589] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Expecting reply to msg 3e30509dbc9349f8bdd930d9981016cf in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 574.402829] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e30509dbc9349f8bdd930d9981016cf [ 574.459626] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fda37ee-7643-4015-a987-3dfec3a90808 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.469259] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47cd0113-8df7-412e-8c2e-ac536a14f38c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.511936] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0657a2c8-1048-46ac-8f27-50e7a0260178 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.521651] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e652784-69f7-4572-99c2-e73829edb16c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.541870] env[62740]: DEBUG nova.compute.provider_tree [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 574.542415] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Expecting reply to msg 7b0e21a398c2408595b2ccb202c04d31 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 574.556910] env[62740]: 
INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7b0e21a398c2408595b2ccb202c04d31 [ 574.558364] env[62740]: DEBUG nova.scheduler.client.report [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 574.564376] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Expecting reply to msg 8be6bcc77aab40afb1b4e4bef51019cd in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 574.579495] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8be6bcc77aab40afb1b4e4bef51019cd [ 574.580833] env[62740]: DEBUG oslo_concurrency.lockutils [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.224s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 574.580961] env[62740]: ERROR nova.compute.manager [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 8cadbe5e-e759-4116-a9e0-efba0a595343, please check neutron logs for more information. 
[ 574.580961] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] Traceback (most recent call last): [ 574.580961] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 574.580961] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] self.driver.spawn(context, instance, image_meta, [ 574.580961] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 574.580961] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 574.580961] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 574.580961] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] vm_ref = self.build_virtual_machine(instance, [ 574.580961] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 574.580961] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] vif_infos = vmwarevif.get_vif_info(self._session, [ 574.580961] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 574.581269] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] for vif in network_info: [ 574.581269] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 574.581269] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] return self._sync_wrapper(fn, *args, **kwargs) [ 574.581269] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 574.581269] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] self.wait() [ 574.581269] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 574.581269] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] self[:] = self._gt.wait() [ 574.581269] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 574.581269] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] return self._exit_event.wait() [ 574.581269] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 574.581269] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] result = hub.switch() [ 574.581269] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
574.581269] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] return self.greenlet.switch() [ 574.581578] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 574.581578] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] result = function(*args, **kwargs) [ 574.581578] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 574.581578] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] return func(*args, **kwargs) [ 574.581578] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 574.581578] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] raise e [ 574.581578] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 574.581578] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] nwinfo = self.network_api.allocate_for_instance( [ 574.581578] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 574.581578] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] created_port_ids = self._update_ports_for_instance( [ 574.581578] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 574.581578] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] with excutils.save_and_reraise_exception(): [ 574.581578] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 574.581901] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] self.force_reraise() [ 574.581901] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 574.581901] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] raise self.value [ 574.581901] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 574.581901] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] updated_port = self._update_port( [ 574.581901] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 574.581901] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] _ensure_no_port_binding_failure(port) [ 574.581901] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 574.581901] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] raise exception.PortBindingFailed(port_id=port['id']) [ 574.581901] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] nova.exception.PortBindingFailed: Binding failed for port 8cadbe5e-e759-4116-a9e0-efba0a595343, please check neutron logs for more information. [ 574.581901] env[62740]: ERROR nova.compute.manager [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] [ 574.582197] env[62740]: DEBUG nova.compute.utils [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] Binding failed for port 8cadbe5e-e759-4116-a9e0-efba0a595343, please check neutron logs for more information. {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 574.584303] env[62740]: DEBUG nova.compute.manager [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] Build of instance 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e was re-scheduled: Binding failed for port 8cadbe5e-e759-4116-a9e0-efba0a595343, please check neutron logs for more information. {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 574.584443] env[62740]: DEBUG nova.compute.manager [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 574.584831] env[62740]: DEBUG oslo_concurrency.lockutils [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Acquiring lock "refresh_cache-36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 574.584831] env[62740]: DEBUG oslo_concurrency.lockutils [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Acquired lock "refresh_cache-36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 574.586994] env[62740]: DEBUG nova.network.neutron [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 574.586994] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Expecting reply to msg 3345d50078844545a59922428c86e125 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 574.594236] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
3345d50078844545a59922428c86e125 [ 574.667555] env[62740]: DEBUG nova.network.neutron [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 575.234985] env[62740]: ERROR nova.compute.manager [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 9b507713-e727-4c28-8059-3547a8442e1e, please check neutron logs for more information. [ 575.234985] env[62740]: ERROR nova.compute.manager Traceback (most recent call last): [ 575.234985] env[62740]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 575.234985] env[62740]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 575.234985] env[62740]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 575.234985] env[62740]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 575.234985] env[62740]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 575.234985] env[62740]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 575.234985] env[62740]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 575.234985] env[62740]: ERROR nova.compute.manager self.force_reraise() [ 575.234985] env[62740]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 575.234985] env[62740]: ERROR nova.compute.manager raise self.value [ 575.234985] env[62740]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 575.234985] env[62740]: ERROR nova.compute.manager updated_port = self._update_port( [ 575.234985] env[62740]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 575.234985] env[62740]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 575.235467] env[62740]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 575.235467] env[62740]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 575.235467] env[62740]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 9b507713-e727-4c28-8059-3547a8442e1e, please check neutron logs for more information. 
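Note: for scale, the inventory record logged at 574.558364 earlier in this section converts to schedulable capacity as (total - reserved) * allocation_ratio per resource class; a quick worked check with those exact numbers:

```python
# Worked check of the inventory record logged at 574.558364: placement's
# usable capacity per resource class is (total - reserved) * allocation_ratio.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f'{rc}: {capacity:g} schedulable')
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```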
[ 575.235467] env[62740]: ERROR nova.compute.manager [ 575.235467] env[62740]: Traceback (most recent call last): [ 575.235467] env[62740]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 575.235467] env[62740]: listener.cb(fileno) [ 575.235467] env[62740]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 575.235467] env[62740]: result = function(*args, **kwargs) [ 575.235467] env[62740]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 575.235467] env[62740]: return func(*args, **kwargs) [ 575.235467] env[62740]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 575.235467] env[62740]: raise e [ 575.235467] env[62740]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 575.235467] env[62740]: nwinfo = self.network_api.allocate_for_instance( [ 575.235467] env[62740]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 575.235467] env[62740]: created_port_ids = self._update_ports_for_instance( [ 575.235467] env[62740]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 575.235467] env[62740]: with excutils.save_and_reraise_exception(): [ 575.235467] env[62740]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 575.235467] env[62740]: self.force_reraise() [ 575.235467] env[62740]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 575.235467] env[62740]: raise self.value [ 575.235467] env[62740]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 575.235467] env[62740]: updated_port = self._update_port( [ 575.235467] env[62740]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 575.235467] env[62740]: _ensure_no_port_binding_failure(port) [ 575.235467] env[62740]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 575.235467] env[62740]: raise exception.PortBindingFailed(port_id=port['id']) [ 575.236415] env[62740]: nova.exception.PortBindingFailed: Binding failed for port 9b507713-e727-4c28-8059-3547a8442e1e, please check neutron logs for more information. [ 575.236415] env[62740]: Removing descriptor: 18 [ 575.238288] env[62740]: ERROR nova.compute.manager [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9b507713-e727-4c28-8059-3547a8442e1e, please check neutron logs for more information. 
[ 575.238288] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] Traceback (most recent call last): [ 575.238288] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 575.238288] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] yield resources [ 575.238288] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 575.238288] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] self.driver.spawn(context, instance, image_meta, [ 575.238288] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 575.238288] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 575.238288] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 575.238288] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] vm_ref = self.build_virtual_machine(instance, [ 575.238288] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 575.238782] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] vif_infos = vmwarevif.get_vif_info(self._session, [ 575.238782] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 575.238782] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] for vif in network_info: [ 575.238782] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 575.238782] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] return self._sync_wrapper(fn, *args, **kwargs) [ 575.238782] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 575.238782] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] self.wait() [ 575.238782] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 575.238782] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] self[:] = self._gt.wait() [ 575.238782] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 575.238782] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] return self._exit_event.wait() [ 575.238782] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 575.238782] env[62740]: ERROR 
nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] result = hub.switch() [ 575.239227] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 575.239227] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] return self.greenlet.switch() [ 575.239227] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 575.239227] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] result = function(*args, **kwargs) [ 575.239227] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 575.239227] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] return func(*args, **kwargs) [ 575.239227] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 575.239227] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] raise e [ 575.239227] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 575.239227] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] nwinfo = self.network_api.allocate_for_instance( [ 575.239227] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 575.239227] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] created_port_ids = self._update_ports_for_instance( [ 575.239227] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 575.239605] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] with excutils.save_and_reraise_exception(): [ 575.239605] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 575.239605] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] self.force_reraise() [ 575.239605] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 575.239605] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] raise self.value [ 575.239605] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 575.239605] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] updated_port = self._update_port( [ 575.239605] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 575.239605] 
env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] _ensure_no_port_binding_failure(port) [ 575.239605] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 575.239605] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] raise exception.PortBindingFailed(port_id=port['id']) [ 575.239605] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] nova.exception.PortBindingFailed: Binding failed for port 9b507713-e727-4c28-8059-3547a8442e1e, please check neutron logs for more information. [ 575.239605] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] [ 575.242374] env[62740]: INFO nova.compute.manager [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] Terminating instance [ 575.242374] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Acquiring lock "refresh_cache-6b804847-a51d-4e01-90a9-bd3f8116f95f" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 575.242374] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Acquired lock "refresh_cache-6b804847-a51d-4e01-90a9-bd3f8116f95f" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 575.242374] env[62740]: DEBUG nova.network.neutron [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 575.242374] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Expecting reply to msg a9b5c68f4a78442dbe6d1ab30eae141a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 575.256349] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a9b5c68f4a78442dbe6d1ab30eae141a [ 575.299460] env[62740]: DEBUG nova.network.neutron [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 575.417862] env[62740]: DEBUG nova.network.neutron [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 575.419169] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Expecting reply to msg 85fc955e610c45038b7a8ffa898e5c13 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 575.429727] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 85fc955e610c45038b7a8ffa898e5c13 [ 575.430239] env[62740]: DEBUG oslo_concurrency.lockutils [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Releasing lock "refresh_cache-36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 575.430239] env[62740]: DEBUG nova.compute.manager [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 575.430396] env[62740]: DEBUG nova.compute.manager [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 575.430467] env[62740]: DEBUG nova.network.neutron [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 575.533649] env[62740]: DEBUG nova.network.neutron [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 575.534249] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Expecting reply to msg 31a3cc337187401f8fb8e1f0766d768f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 575.544127] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31a3cc337187401f8fb8e1f0766d768f [ 575.544520] env[62740]: DEBUG nova.network.neutron [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 575.544974] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Expecting reply to msg 57edd02a28c24cd9bbcf846aee4fcfed in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 575.556349] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 57edd02a28c24cd9bbcf846aee4fcfed [ 575.556963] env[62740]: INFO nova.compute.manager [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] [instance: 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e] Took 0.13 seconds to deallocate network for instance. [ 575.558674] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Expecting reply to msg c2dcbe11b95d48949055405b2c5a8948 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 575.566219] env[62740]: DEBUG nova.network.neutron [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 575.566506] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Expecting reply to msg e45ea0e10bb340719773446a86d1ccaa in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 575.582576] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e45ea0e10bb340719773446a86d1ccaa [ 575.583284] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Releasing lock "refresh_cache-6b804847-a51d-4e01-90a9-bd3f8116f95f" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 575.583626] env[62740]: DEBUG nova.compute.manager [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] Start 
destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 575.585607] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 575.585607] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f9bc8fa8-7d61-49b4-812c-002024c3f96c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.597165] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71721c08-30f5-47d7-926c-9446584c15e9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.621382] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6b804847-a51d-4e01-90a9-bd3f8116f95f could not be found. [ 575.621490] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 575.621739] env[62740]: INFO nova.compute.manager [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 575.622308] env[62740]: DEBUG oslo.service.loopingcall [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 575.622634] env[62740]: DEBUG nova.compute.manager [-] [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 575.622788] env[62740]: DEBUG nova.network.neutron [-] [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 575.628588] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c2dcbe11b95d48949055405b2c5a8948 [ 575.630084] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Expecting reply to msg e07a09d7d93b4a38a898c3e3f903e579 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 575.667758] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e07a09d7d93b4a38a898c3e3f903e579 [ 575.676413] env[62740]: DEBUG nova.network.neutron [-] [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 575.676917] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e186edfb20314b89a20d4e7def02f0db in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 575.686382] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e186edfb20314b89a20d4e7def02f0db [ 575.687251] env[62740]: DEBUG nova.network.neutron [-] [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 575.687672] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg f19233a1e04e478bae109f3d6fc1adae in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 575.701126] env[62740]: INFO nova.scheduler.client.report [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Deleted allocations for instance 36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e [ 575.708035] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f19233a1e04e478bae109f3d6fc1adae [ 575.709082] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Expecting reply to msg 000015a8b48d4bc8a60ff7e295802e78 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 575.711018] env[62740]: INFO nova.compute.manager [-] [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] Took 0.09 seconds to deallocate network for instance. 
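The PortBindingFailed tracebacks above all terminate in _ensure_no_port_binding_failure (nova/network/neutron.py:294). A minimal sketch of that check, reconstructed from the traceback rather than copied from the Nova source; the 'binding:vif_type' key and the 'binding_failed' sentinel are how Neutron conventionally reports that no mechanism driver could bind the port, and the exception class here is a simplified stand-in:

# Minimal sketch, not verbatim Nova code: the traceback shows this check
# raising PortBindingFailed for port 9b507713-e727-4c28-8059-3547a8442e1e.
VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed sentinel set by Neutron

class PortBindingFailed(Exception):  # simplified stand-in for nova.exception
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, please check neutron "
            "logs for more information.")

def _ensure_no_port_binding_failure(port):
    # A port whose binding attempt failed comes back from Neutron with
    # binding:vif_type == 'binding_failed'; treat that as a hard error.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])

In this log the check fires inside _update_ports_for_instance, which is why the build is aborted, the claim rolled back, and the instance rescheduled rather than spawned.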
[ 575.712231] env[62740]: DEBUG nova.compute.claims [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 575.712519] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.712938] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.715103] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Expecting reply to msg cebf7190bfdc4ec69f3099ecc2679a3c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 575.728317] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 000015a8b48d4bc8a60ff7e295802e78 [ 575.728883] env[62740]: DEBUG oslo_concurrency.lockutils [None req-48701e8e-b400-4298-8c3d-a8a1c9f023c7 tempest-InstanceActionsNegativeTestJSON-1087425453 tempest-InstanceActionsNegativeTestJSON-1087425453-project-member] Lock "36b25dfb-c0d3-4a98-9d63-9f7f4fcc193e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 12.656s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 575.806279] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cebf7190bfdc4ec69f3099ecc2679a3c [ 575.842342] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6616458c-2751-4e86-a168-3dec76dbf0f5 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.853910] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25496bbe-e301-4c90-a9bd-291b054d6a3e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.895331] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc586b0d-d7de-404e-9bc6-ce31047efd3a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.903878] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e504c40f-1374-494e-b6ab-8ec29d2ff639 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.920270] env[62740]: DEBUG nova.compute.provider_tree [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Inventory has
not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 575.920270] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Expecting reply to msg b57627275e6a413aa25ccb5da5f246cb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 575.930478] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b57627275e6a413aa25ccb5da5f246cb [ 575.930478] env[62740]: DEBUG nova.scheduler.client.report [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 575.934228] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Expecting reply to msg f3c05fb4e455480d8c4dd653e69eec77 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 575.954954] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f3c05fb4e455480d8c4dd653e69eec77 [ 575.956049] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.243s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 575.957128] env[62740]: ERROR nova.compute.manager [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 9b507713-e727-4c28-8059-3547a8442e1e, please check neutron logs for more information. 
[ 575.957128] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] Traceback (most recent call last): [ 575.957128] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 575.957128] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] self.driver.spawn(context, instance, image_meta, [ 575.957128] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 575.957128] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 575.957128] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 575.957128] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] vm_ref = self.build_virtual_machine(instance, [ 575.957128] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 575.957128] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] vif_infos = vmwarevif.get_vif_info(self._session, [ 575.957128] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 575.960072] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] for vif in network_info: [ 575.960072] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 575.960072] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] return self._sync_wrapper(fn, *args, **kwargs) [ 575.960072] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 575.960072] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] self.wait() [ 575.960072] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 575.960072] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] self[:] = self._gt.wait() [ 575.960072] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 575.960072] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] return self._exit_event.wait() [ 575.960072] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 575.960072] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] result = hub.switch() [ 575.960072] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
575.960072] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] return self.greenlet.switch() [ 575.960527] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 575.960527] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] result = function(*args, **kwargs) [ 575.960527] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 575.960527] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] return func(*args, **kwargs) [ 575.960527] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 575.960527] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] raise e [ 575.960527] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 575.960527] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] nwinfo = self.network_api.allocate_for_instance( [ 575.960527] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 575.960527] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] created_port_ids = self._update_ports_for_instance( [ 575.960527] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 575.960527] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] with excutils.save_and_reraise_exception(): [ 575.960527] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 575.960831] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] self.force_reraise() [ 575.960831] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 575.960831] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] raise self.value [ 575.960831] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 575.960831] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] updated_port = self._update_port( [ 575.960831] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 575.960831] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] _ensure_no_port_binding_failure(port) [ 575.960831] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 575.960831] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] raise exception.PortBindingFailed(port_id=port['id']) [ 575.960831] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] nova.exception.PortBindingFailed: Binding failed for port 9b507713-e727-4c28-8059-3547a8442e1e, please check neutron logs for more information. [ 575.960831] env[62740]: ERROR nova.compute.manager [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] [ 575.961149] env[62740]: DEBUG nova.compute.utils [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] Binding failed for port 9b507713-e727-4c28-8059-3547a8442e1e, please check neutron logs for more information. {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 575.965156] env[62740]: DEBUG nova.compute.manager [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] Build of instance 6b804847-a51d-4e01-90a9-bd3f8116f95f was re-scheduled: Binding failed for port 9b507713-e727-4c28-8059-3547a8442e1e, please check neutron logs for more information. {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 575.965156] env[62740]: DEBUG nova.compute.manager [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 575.965156] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Acquiring lock "refresh_cache-6b804847-a51d-4e01-90a9-bd3f8116f95f" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 575.965156] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Acquired lock "refresh_cache-6b804847-a51d-4e01-90a9-bd3f8116f95f" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 575.965579] env[62740]: DEBUG nova.network.neutron [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 575.965579] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Expecting reply to msg e27bae1f25ae4f1395ddfafcc8600d56 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 575.977244] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e27bae1f25ae4f1395ddfafcc8600d56 [ 576.043949] env[62740]: DEBUG nova.network.neutron [None req-1eece817-5f05-49e9-ac91-094c367d140f 
tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 576.318423] env[62740]: DEBUG nova.network.neutron [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 576.318423] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Expecting reply to msg d0d41019ee5a47ef9822ba78ff287748 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 576.332479] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d0d41019ee5a47ef9822ba78ff287748 [ 576.332479] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Releasing lock "refresh_cache-6b804847-a51d-4e01-90a9-bd3f8116f95f" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 576.332479] env[62740]: DEBUG nova.compute.manager [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 576.332479] env[62740]: DEBUG nova.compute.manager [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 576.332479] env[62740]: DEBUG nova.network.neutron [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 576.549308] env[62740]: DEBUG nova.network.neutron [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 576.549308] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Expecting reply to msg 26395a7d555443c8b53ee5e2a13da4cb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 576.558779] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 26395a7d555443c8b53ee5e2a13da4cb [ 576.562913] env[62740]: DEBUG nova.network.neutron [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 576.562913] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Expecting reply to msg 974bd5d9a1c34644bec150c2faa8b6fe in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 576.573626] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 974bd5d9a1c34644bec150c2faa8b6fe [ 576.574648] env[62740]: INFO nova.compute.manager [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] [instance: 6b804847-a51d-4e01-90a9-bd3f8116f95f] Took 0.24 seconds to deallocate network for instance. [ 576.577442] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Expecting reply to msg 87b0f36b1591460b9d42233f5121257b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 576.646910] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 87b0f36b1591460b9d42233f5121257b [ 576.652904] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Expecting reply to msg 89dde33cec474a6d8d3a932dfe55a4f9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 576.699470] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 89dde33cec474a6d8d3a932dfe55a4f9 [ 576.735538] env[62740]: INFO nova.scheduler.client.report [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Deleted allocations for instance 6b804847-a51d-4e01-90a9-bd3f8116f95f [ 576.741621] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Expecting reply to msg aea5a85885f6462daf6f637893256373 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 576.781929] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aea5a85885f6462daf6f637893256373 [ 576.782602] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1eece817-5f05-49e9-ac91-094c367d140f tempest-ServerDiagnosticsTest-873405019 tempest-ServerDiagnosticsTest-873405019-project-member] Lock "6b804847-a51d-4e01-90a9-bd3f8116f95f" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 10.563s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 576.834574] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Acquiring lock "53f7fc38-a541-4843-883d-da7311445fe5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 576.834803] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Lock "53f7fc38-a541-4843-883d-da7311445fe5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 576.835333] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Expecting reply to msg 0fd5baee7a194778bd1d6046120be207 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 576.858961] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0fd5baee7a194778bd1d6046120be207 [ 576.858961] env[62740]: DEBUG nova.compute.manager [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Starting instance... 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 576.858961] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Expecting reply to msg dbb25e125d834d8ca434c96f99f7c4ae in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 576.908350] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dbb25e125d834d8ca434c96f99f7c4ae [ 576.931979] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 576.932556] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 576.933760] env[62740]: INFO nova.compute.claims [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 576.936688] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Expecting reply to msg ad54913bbd244e418af0da4d48ce03dc in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 576.979495] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ad54913bbd244e418af0da4d48ce03dc [ 576.981075] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Expecting reply to msg 203810e2c5f044ec954dacccad4e13bb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 576.990649] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 203810e2c5f044ec954dacccad4e13bb [ 577.024497] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83ed0cae-050c-4fa1-8575-f58e2bbe3b8f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.037487] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edacd4da-3092-4c41-a86a-596295fa4d77 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.070470] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6881f309-7539-4cff-85d6-eb5c8fcfbd65 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.078292] env[62740]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9341f3c0-c283-4b47-a589-866d65f8c4d1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.092816] env[62740]: DEBUG nova.compute.provider_tree [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 577.093327] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Expecting reply to msg bf502a7ff9c743238aa5be91c5b26c7f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 577.105876] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bf502a7ff9c743238aa5be91c5b26c7f [ 577.106899] env[62740]: DEBUG nova.scheduler.client.report [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 577.109434] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Expecting reply to msg 8b59fab2386549e98c372c89cd139ee4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 577.125934] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8b59fab2386549e98c372c89cd139ee4 [ 577.126854] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.194s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 577.127592] env[62740]: DEBUG nova.compute.manager [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Start building networks asynchronously for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 577.130347] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Expecting reply to msg 48b800762485471fbe5eee8e8e222496 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 577.184031] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 48b800762485471fbe5eee8e8e222496 [ 577.184803] env[62740]: DEBUG nova.compute.utils [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 577.186257] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Expecting reply to msg 63aa88a64c29426a93adfd30713950f0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 577.186584] env[62740]: DEBUG nova.compute.manager [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 577.186928] env[62740]: DEBUG nova.network.neutron [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 577.200482] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63aa88a64c29426a93adfd30713950f0 [ 577.201272] env[62740]: DEBUG nova.compute.manager [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 577.203018] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Expecting reply to msg bdf3d9350b0746958d4881e062012830 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 577.260046] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bdf3d9350b0746958d4881e062012830 [ 577.263894] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Expecting reply to msg d3311f23789e4fa0a4c041ee89e2f182 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 577.328420] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d3311f23789e4fa0a4c041ee89e2f182 [ 577.330012] env[62740]: DEBUG nova.compute.manager [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Start spawning the instance on the hypervisor. {{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 577.382028] env[62740]: DEBUG nova.virt.hardware [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 577.382028] env[62740]: DEBUG nova.virt.hardware [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 577.382723] env[62740]: DEBUG nova.virt.hardware [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 577.387128] env[62740]: DEBUG nova.virt.hardware [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 577.387304] env[62740]: DEBUG nova.virt.hardware [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a 
tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 577.387467] env[62740]: DEBUG nova.virt.hardware [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 577.387701] env[62740]: DEBUG nova.virt.hardware [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 577.387865] env[62740]: DEBUG nova.virt.hardware [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 577.388045] env[62740]: DEBUG nova.virt.hardware [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 577.388235] env[62740]: DEBUG nova.virt.hardware [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 577.388476] env[62740]: DEBUG nova.virt.hardware [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 577.389923] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a84058e6-6ac8-4178-b516-b53c07e7506c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.398993] env[62740]: DEBUG nova.policy [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4b94fe95dc67487c81decc5ca0a65db9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '48004a3f53e4434a95b02779399fc7dc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 577.409516] env[62740]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9674fbb1-6803-4e52-b6e0-6ad51b4a0202 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.651721] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Acquiring lock "61b01264-eb0f-410c-8b39-971b95d16bb9" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 578.652085] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Lock "61b01264-eb0f-410c-8b39-971b95d16bb9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.652436] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Expecting reply to msg 9556fc8a51a24f40880a0b14adc61bcf in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 578.662966] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9556fc8a51a24f40880a0b14adc61bcf [ 578.663446] env[62740]: DEBUG nova.compute.manager [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Starting instance... 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 578.665081] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Expecting reply to msg 70a9bf33edb4437cbd1f490474146ea3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 578.698170] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 70a9bf33edb4437cbd1f490474146ea3 [ 578.725648] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 578.726409] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.728190] env[62740]: INFO nova.compute.claims [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 578.730206] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Expecting reply to msg b34257be04444b50813f8f831bcb8c01 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 578.779116] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b34257be04444b50813f8f831bcb8c01 [ 578.780980] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Expecting reply to msg 2a322b62b8614584a9206125b2448449 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 578.800840] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a322b62b8614584a9206125b2448449 [ 578.869989] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffa3ce0a-8771-408a-a736-95b7f9489120 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.877657] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0a50be5-69b9-4ae6-a13d-0f558a95727a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.913334] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e2506f6-b470-4cd9-84f6-92c7b0c0655e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.924354] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-d1b98596-d127-47c7-ab45-7794f0a870c8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.946800] env[62740]: DEBUG nova.compute.provider_tree [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 578.947356] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Expecting reply to msg a532dfe557624915b0985d25128b6c3b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 578.961054] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a532dfe557624915b0985d25128b6c3b [ 578.962583] env[62740]: DEBUG nova.scheduler.client.report [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 578.964889] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Expecting reply to msg 12dc7e49da5c425b95b3588e680ba486 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 578.982344] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 12dc7e49da5c425b95b3588e680ba486 [ 578.983096] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.257s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 578.983629] env[62740]: DEBUG nova.compute.manager [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Start building networks asynchronously for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 578.985299] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Expecting reply to msg 5e8297aac6f54337bdeeb0255eff53d9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 579.047313] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e8297aac6f54337bdeeb0255eff53d9 [ 579.048766] env[62740]: DEBUG nova.compute.utils [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 579.049453] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Expecting reply to msg 4bf855c1de114728943337b2a06d383b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 579.053226] env[62740]: DEBUG nova.compute.manager [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 579.053226] env[62740]: DEBUG nova.network.neutron [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 579.066052] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4bf855c1de114728943337b2a06d383b [ 579.066727] env[62740]: DEBUG nova.compute.manager [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 579.068694] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Expecting reply to msg cb7f647b609644839b5201e4db93a99a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 579.112852] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cb7f647b609644839b5201e4db93a99a [ 579.115695] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Expecting reply to msg afb56054bbc04272bd27b7bd16b08fbd in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 579.158640] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg afb56054bbc04272bd27b7bd16b08fbd [ 579.160106] env[62740]: DEBUG nova.compute.manager [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Start spawning the instance on the hypervisor. {{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 579.197371] env[62740]: DEBUG nova.virt.hardware [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 579.197601] env[62740]: DEBUG nova.virt.hardware [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 579.197757] env[62740]: DEBUG nova.virt.hardware [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 579.198057] env[62740]: DEBUG nova.virt.hardware [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 579.198375] env[62740]: DEBUG nova.virt.hardware [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 
tempest-ServerExternalEventsTest-886346695-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 579.198375] env[62740]: DEBUG nova.virt.hardware [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 579.198521] env[62740]: DEBUG nova.virt.hardware [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 579.198667] env[62740]: DEBUG nova.virt.hardware [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 579.198835] env[62740]: DEBUG nova.virt.hardware [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 579.198999] env[62740]: DEBUG nova.virt.hardware [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 579.199357] env[62740]: DEBUG nova.virt.hardware [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 579.200483] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a47d9099-9383-4ee4-bdb5-9bf97f78cdfa {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.212890] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afafee1b-658c-41dc-afe5-eca5484dddd0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.748039] env[62740]: DEBUG nova.network.neutron [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Successfully created port: 89db75e9-e46e-4868-a03f-d0d171a5e2c1 {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 579.917298] env[62740]: DEBUG nova.policy [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 
'user_id': '420e8baaf7c24571a3910e8636847f24', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '68a2e576021a4834a3317105f2a30763', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 582.169820] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 582.170273] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 582.170365] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Starting heal instance info cache {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 582.170481] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Rebuilding the list of instances to heal {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 582.171114] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 074830edbb774f0f81412f6c88e7f5d4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 582.194999] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 074830edbb774f0f81412f6c88e7f5d4 [ 582.196317] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 582.196490] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 582.196730] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Didn't find any instances for network info cache update. 
{{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 582.197218] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 582.198319] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 582.198319] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 582.198319] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 582.198319] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 582.199141] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._sync_power_states {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 582.200539] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg f25877fa94c84595811c7a4694243cbe in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 582.217500] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f25877fa94c84595811c7a4694243cbe [ 582.217801] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Getting list of instances from cluster (obj){ [ 582.217801] env[62740]: value = "domain-c8" [ 582.217801] env[62740]: _type = "ClusterComputeResource" [ 582.217801] env[62740]: } {{(pid=62740) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 582.220782] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-054baba9-b482-4e6a-b5be-c3f183e86917 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.232835] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Got total of 0 instances {{(pid=62740) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 582.232904] env[62740]: WARNING nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] While synchronizing instance power states, found 2 instances in the database and 0 instances on the hypervisor. 
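The warning just above comes from the periodic _sync_power_states audit catching a transient mismatch: both instances already have database records, but neither VM exists on the cluster yet because their spawns are still in flight. As a minimal sketch of that reconciliation pattern in plain Python (this is not Nova's actual implementation; the helpers passed in are hypothetical, and only the per-UUID locking mirrors the lock names visible in the log):

    import threading

    # One lock per instance UUID, like the per-UUID lock names in the log above.
    _uuid_locks = {}

    def sync_power_states(db_instances, count_driver_instances, sync_one, warn):
        # Compare what the database believes exists with what the hypervisor reports.
        num_db = len(db_instances)
        num_vm = count_driver_instances()
        if num_db != num_vm:
            warn("While synchronizing instance power states, found %d instances in "
                 "the database and %d instances on the hypervisor." % (num_db, num_vm))
        for inst in db_instances:
            lock = _uuid_locks.setdefault(inst["uuid"], threading.Lock())
            with lock:  # serialize against a concurrent build/delete of the same UUID
                sync_one(inst)  # hypothetical: query driver power state, update the DB

The per-UUID lock is what makes a sync attempt safe to run while a build holds the same UUID lock, which is exactly the situation the next records show.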
[ 582.233357] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Triggering sync for uuid 53f7fc38-a541-4843-883d-da7311445fe5 {{(pid=62740) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 582.233357] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Triggering sync for uuid 61b01264-eb0f-410c-8b39-971b95d16bb9 {{(pid=62740) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 582.233483] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "53f7fc38-a541-4843-883d-da7311445fe5" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.233705] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "61b01264-eb0f-410c-8b39-971b95d16bb9" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.234761] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 582.234761] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62740) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 582.234761] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager.update_available_resource {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 582.235323] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg df5590e309624572acf69896039c5435 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 582.249079] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg df5590e309624572acf69896039c5435 [ 582.250253] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.250253] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 582.250253] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 582.250253] env[62740]: DEBUG 
nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62740) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 582.251584] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bc6c874-86e9-4831-9406-fff899f9585c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.262435] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-262d5a64-5d3f-4f6b-a407-d40cd8a4b759 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.282797] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a34c364-fa00-45c5-a385-28b94a18393d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.290209] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f144b5ee-8074-482c-ae06-3b8ea87d3069 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.324302] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181693MB free_disk=90GB free_vcpus=48 pci_devices=None {{(pid=62740) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 582.324399] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.324819] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 582.325926] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 962bc61e648a4809ba9793ec644cb478 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 582.345689] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 962bc61e648a4809ba9793ec644cb478 [ 582.347458] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg feeb19d0380444969186dcd5a99dc94f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 582.366714] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg feeb19d0380444969186dcd5a99dc94f [ 582.394064] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 53f7fc38-a541-4843-883d-da7311445fe5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 582.394344] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 61b01264-eb0f-410c-8b39-971b95d16bb9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 582.394496] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 582.394953] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=768MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 582.447080] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6577c4a-c99f-423b-bfbd-206298e37887 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.457032] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1f1f811-4410-4b19-addc-8ed49d081eb9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.492786] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e17e2a4-1710-42ea-ac45-e9d8f29e1b3d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.500864] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c26dd80a-b84a-47d4-8fc0-49d494a2e170 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.515261] env[62740]: DEBUG nova.compute.provider_tree [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 582.515456] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg e24e9080208744dfa7c3ac6176a9baa4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 582.530790] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e24e9080208744dfa7c3ac6176a9baa4 [ 582.531758] env[62740]: DEBUG nova.scheduler.client.report [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:940}} [ 582.536409] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg d434864e0ede40a7b3b45ce3d7c2ac09 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 582.556102] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d434864e0ede40a7b3b45ce3d7c2ac09 [ 582.557022] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62740) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 582.557173] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.233s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 582.557372] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 582.557724] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Getting list of instances from cluster (obj){ [ 582.557724] env[62740]: value = "domain-c8" [ 582.557724] env[62740]: _type = "ClusterComputeResource" [ 582.557724] env[62740]: } {{(pid=62740) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 582.558999] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2ec04e3-ff33-4bb4-ba20-09421f2a0305 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.570038] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Got total of 0 instances {{(pid=62740) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 583.103733] env[62740]: DEBUG nova.network.neutron [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Successfully created port: 9b017a97-fb78-47ef-902a-023312c318b7 {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 584.978380] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Acquiring lock "9d175573-2af2-4f66-98cd-411d10f749f0" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 584.978696] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Lock "9d175573-2af2-4f66-98cd-411d10f749f0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 584.981458] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Expecting reply to msg bfe7cffbdeae49de893f14e6c8bbce3d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 584.996607] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bfe7cffbdeae49de893f14e6c8bbce3d [ 584.997130] env[62740]: DEBUG nova.compute.manager [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 584.998992] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Expecting reply to msg 723712b3e0d9410a8039724f44afeee2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 585.048021] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 723712b3e0d9410a8039724f44afeee2 [ 585.074504] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.074777] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.077892] env[62740]: INFO nova.compute.claims [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 585.081451] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Expecting reply to msg 09ce8e09c8104ece95bb53f6c2b30f99 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 585.141954] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 09ce8e09c8104ece95bb53f6c2b30f99 [ 585.141954] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Expecting reply to msg 4a4df898b3d74741a02cb08ebca109b0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 585.158547] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4a4df898b3d74741a02cb08ebca109b0 [ 585.252634] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4ce3096b-95dd-465b-bf60-d4b09bc52aa5 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.262804] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73efc25b-1781-4f99-b266-16d707614b74 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.306680] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec803884-c6f1-44e7-8ed2-145d2a84551b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.314861] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6627bc0-9350-42ed-9ca6-d12a4fd87960 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.329351] env[62740]: DEBUG nova.compute.provider_tree [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 585.329874] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Expecting reply to msg e71c5a1c08b44c8093457f6d96f8382d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 585.344580] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e71c5a1c08b44c8093457f6d96f8382d [ 585.347018] env[62740]: DEBUG nova.scheduler.client.report [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 585.351607] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Expecting reply to msg 242a3a7617164b3facc3dc806c93cdd0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 585.368050] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 242a3a7617164b3facc3dc806c93cdd0 [ 585.369138] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.294s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 585.369650] env[62740]: DEBUG nova.compute.manager [None req-c627bd83-dd90-46d0-855d-1590964ad72d 
tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Start building networks asynchronously for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 585.371767] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Expecting reply to msg ce587c9f7c7f4e938d95b017faffb0ea in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 585.428339] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ce587c9f7c7f4e938d95b017faffb0ea [ 585.430043] env[62740]: DEBUG nova.compute.utils [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 585.430835] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Expecting reply to msg aa21d52459f84ba0b0c8f6298821d15c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 585.434757] env[62740]: DEBUG nova.compute.manager [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 585.434757] env[62740]: DEBUG nova.network.neutron [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 585.450512] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa21d52459f84ba0b0c8f6298821d15c [ 585.450512] env[62740]: DEBUG nova.compute.manager [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 585.453728] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Expecting reply to msg dbcc091114794394824224003f40fb82 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 585.505245] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dbcc091114794394824224003f40fb82 [ 585.509350] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Expecting reply to msg 428a201ffe6545c685fa9b6c4e2a8980 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 585.549828] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 428a201ffe6545c685fa9b6c4e2a8980 [ 585.552110] env[62740]: DEBUG nova.compute.manager [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Start spawning the instance on the hypervisor. {{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 585.593591] env[62740]: DEBUG nova.virt.hardware [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 585.593591] env[62740]: DEBUG nova.virt.hardware [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 585.593771] env[62740]: DEBUG nova.virt.hardware [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 585.594479] env[62740]: DEBUG nova.virt.hardware [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 585.595349] env[62740]: DEBUG nova.virt.hardware [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 
tempest-ServerActionsTestOtherB-1591768249-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 585.596172] env[62740]: DEBUG nova.virt.hardware [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 585.596172] env[62740]: DEBUG nova.virt.hardware [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 585.596172] env[62740]: DEBUG nova.virt.hardware [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 585.596658] env[62740]: DEBUG nova.virt.hardware [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 585.596903] env[62740]: DEBUG nova.virt.hardware [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 585.597111] env[62740]: DEBUG nova.virt.hardware [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 585.597989] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83803b6c-70bf-46fa-a075-4b6c9bc6f6f5 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.607726] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43fc57c6-d9c4-45b8-abd5-91797b58ed2e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.906964] env[62740]: DEBUG nova.policy [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '250e3c5f6b9c46f3ac67461a35605ed3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8f8758e87377404eb222264dba749e83', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) 
authorize /opt/stack/nova/nova/policy.py:203}} [ 586.364008] env[62740]: DEBUG nova.network.neutron [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Successfully updated port: 89db75e9-e46e-4868-a03f-d0d171a5e2c1 {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 586.364008] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Expecting reply to msg 63293d6111f64e5d88c43a071b2ae46b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 586.382020] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63293d6111f64e5d88c43a071b2ae46b [ 586.385017] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Acquiring lock "refresh_cache-53f7fc38-a541-4843-883d-da7311445fe5" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 586.385017] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Acquired lock "refresh_cache-53f7fc38-a541-4843-883d-da7311445fe5" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 586.385017] env[62740]: DEBUG nova.network.neutron [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 586.385017] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Expecting reply to msg 26c7b87f1c4e4dac858eee424c158e08 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 586.398021] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 26c7b87f1c4e4dac858eee424c158e08 [ 586.532677] env[62740]: DEBUG nova.network.neutron [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 587.489712] env[62740]: DEBUG nova.network.neutron [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Updating instance_info_cache with network_info: [{"id": "89db75e9-e46e-4868-a03f-d0d171a5e2c1", "address": "fa:16:3e:ec:ae:59", "network": {"id": "71df833e-f099-40c7-9b1c-c96f8715ef40", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1619380618-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48004a3f53e4434a95b02779399fc7dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27abaf31-0f39-428c-a8d3-cd7548de6818", "external-id": "nsx-vlan-transportzone-505", "segmentation_id": 505, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89db75e9-e4", "ovs_interfaceid": "89db75e9-e46e-4868-a03f-d0d171a5e2c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 587.490283] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Expecting reply to msg 72f5a64c7c4e40909ef4f0649256e7f3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 587.509222] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 72f5a64c7c4e40909ef4f0649256e7f3 [ 587.509926] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Releasing lock "refresh_cache-53f7fc38-a541-4843-883d-da7311445fe5" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 587.510439] env[62740]: DEBUG nova.compute.manager [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Instance network_info: |[{"id": "89db75e9-e46e-4868-a03f-d0d171a5e2c1", "address": "fa:16:3e:ec:ae:59", "network": {"id": "71df833e-f099-40c7-9b1c-c96f8715ef40", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1619380618-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48004a3f53e4434a95b02779399fc7dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", 
"details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27abaf31-0f39-428c-a8d3-cd7548de6818", "external-id": "nsx-vlan-transportzone-505", "segmentation_id": 505, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89db75e9-e4", "ovs_interfaceid": "89db75e9-e46e-4868-a03f-d0d171a5e2c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 587.511394] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ec:ae:59', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '27abaf31-0f39-428c-a8d3-cd7548de6818', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '89db75e9-e46e-4868-a03f-d0d171a5e2c1', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 587.525883] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 587.526537] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a97acd08-9346-489a-af3b-58627e275791 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.540119] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Created folder: OpenStack in parent group-v4. [ 587.540119] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Creating folder: Project (48004a3f53e4434a95b02779399fc7dc). Parent ref: group-v156037. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 587.540119] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-92a1a7c4-d5ce-41c3-9cf0-b2bf83c5bc52 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.554369] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Created folder: Project (48004a3f53e4434a95b02779399fc7dc) in parent group-v156037. [ 587.554645] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Creating folder: Instances. Parent ref: group-v156038. 
{{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 587.554818] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f1b5c4a9-d92c-4a70-8ae1-1ab2c11f490f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.564025] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Created folder: Instances in parent group-v156038. [ 587.564924] env[62740]: DEBUG oslo.service.loopingcall [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 587.564924] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 587.564924] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f086bebc-7293-4b9e-aaf7-2c6b96b0fede {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.589076] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 587.589076] env[62740]: value = "task-640037" [ 587.589076] env[62740]: _type = "Task" [ 587.589076] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.600141] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640037, 'name': CreateVM_Task} progress is 0%. 
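The "Waiting for the task" block above is oslo.vmware's wait_for_task: it re-reads the task's info property on a fixed interval (each "progress is N%" line is one poll) until the task reports success or error. A condensed version of that loop built from oslo.service and oslo.vmware primitives, trimmed to the essential error handling:

    from oslo_service import loopingcall
    from oslo_vmware import exceptions as vexc
    from oslo_vmware import vim_util

    def wait_for_task(session, task_ref, interval=0.5):
        def _poll():
            # Re-read TaskInfo; this read is what each progress line reflects.
            info = session.invoke_api(vim_util, 'get_object_property',
                                      session.vim, task_ref, 'info')
            if info.state == 'success':
                raise loopingcall.LoopingCallDone(info)
            if info.state == 'error':
                raise vexc.VimFaultException([], info.error.localizedMessage)

        timer = loopingcall.FixedIntervalLoopingCall(_poll)
        return timer.start(interval=interval).wait()

Raising LoopingCallDone is how the loop returns a value: start() hands back an event whose wait() yields whatever was attached to the exception, here the final TaskInfo.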
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.665323] env[62740]: DEBUG nova.network.neutron [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Successfully updated port: 9b017a97-fb78-47ef-902a-023312c318b7 {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 587.665323] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Expecting reply to msg 63f6e58e09e740f1be6d18c92e271650 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 587.685577] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63f6e58e09e740f1be6d18c92e271650 [ 587.686416] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Acquiring lock "refresh_cache-61b01264-eb0f-410c-8b39-971b95d16bb9" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 587.686546] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Acquired lock "refresh_cache-61b01264-eb0f-410c-8b39-971b95d16bb9" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 587.686988] env[62740]: DEBUG nova.network.neutron [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 587.687108] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Expecting reply to msg b895847e6ccd457a974753a74caa2fc7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 587.697444] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b895847e6ccd457a974753a74caa2fc7 [ 588.099080] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640037, 'name': CreateVM_Task, 'duration_secs': 0.31318} completed successfully. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.099231] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 588.127786] env[62740]: DEBUG oslo_vmware.service [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-160de038-8ed8-4169-89c3-2f6b5b10b9d5 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.134630] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 588.134630] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 588.135300] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 588.135570] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-197ecd08-1a33-4ef0-be0d-c30bed0b4679 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.140607] env[62740]: DEBUG oslo_vmware.api [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Waiting for the task: (returnval){ [ 588.140607] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]527b2ebe-1237-8aa9-e323-4be51d018da9" [ 588.140607] env[62740]: _type = "Task" [ 588.140607] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.148192] env[62740]: DEBUG oslo_vmware.api [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]527b2ebe-1237-8aa9-e323-4be51d018da9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.215158] env[62740]: DEBUG nova.network.neutron [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 588.655884] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 588.656155] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 588.656679] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 588.656679] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 588.657293] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 588.657556] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5bf99023-1e0b-4b7f-8190-f71d22ce8f77 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.667436] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 588.668060] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Folder [datastore2] devstack-image-cache_base created. 
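The MakeDirectory call above provisions the shared image-cache directory on datastore2. Every concurrent spawn that misses the cache issues the same mkdir, so the conventional shape of the helper (as in Nova's ds_util) is idempotent: create the whole tree and treat an already-existing path as success. A sketch, assuming an oslo.vmware session object:

    from oslo_vmware import exceptions as vexc

    def mkdir_idempotent(session, ds_path, dc_ref):
        # Create the full directory tree on the datastore; swallow the
        # race where another worker created it first.
        try:
            session.invoke_api(session.vim, 'MakeDirectory',
                               session.vim.service_content.fileManager,
                               name=ds_path, datacenter=dc_ref,
                               createParentDirectories=True)
        except vexc.FileAlreadyExistsException:
            pass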
{{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 588.668812] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd296058-2312-4463-9b78-226ae16013f7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.678406] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d146e813-c810-499e-b635-636a9b96c9df {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.686552] env[62740]: DEBUG oslo_vmware.api [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Waiting for the task: (returnval){ [ 588.686552] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]525a58ac-0521-157d-46fe-62f597785429" [ 588.686552] env[62740]: _type = "Task" [ 588.686552] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.694156] env[62740]: DEBUG oslo_vmware.api [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]525a58ac-0521-157d-46fe-62f597785429, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.809518] env[62740]: DEBUG nova.network.neutron [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Successfully created port: 136a594e-2108-4bbf-9bd6-f36d7263aedb {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 589.202306] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 589.202570] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Creating directory with path [datastore2] vmware_temp/e6f0bcf4-49dd-4bf5-a75c-d3fe5842137c/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 589.202807] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ccd59a1c-bfed-4edb-961c-27a1b3c95cd9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.232675] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Created directory with path [datastore2] vmware_temp/e6f0bcf4-49dd-4bf5-a75c-d3fe5842137c/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 589.232906] env[62740]: DEBUG 
nova.virt.vmwareapi.vmops [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Fetch image to [datastore2] vmware_temp/e6f0bcf4-49dd-4bf5-a75c-d3fe5842137c/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 589.233109] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/e6f0bcf4-49dd-4bf5-a75c-d3fe5842137c/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 589.234010] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2feb151a-eed5-4fbc-b81f-74e7d2b8b6ea {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.243732] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e31f1f2c-e2f9-47e6-8457-72756f6acafe {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.255528] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb93163d-5514-46ee-b1db-ae1c182eef2a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.291664] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ccabc30-2ab4-4199-b1c7-3b0530af2622 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.298857] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5404db79-83da-4f9f-8dc1-407abd7ada72 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.324616] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 589.429577] env[62740]: DEBUG oslo_vmware.rw_handles [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e6f0bcf4-49dd-4bf5-a75c-d3fe5842137c/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
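The write connection above is, underneath the rw_handles abstraction, an HTTPS PUT against the host's /folder endpoint: the image bytes are streamed straight into the datastore path named in the URL, authenticated via the ticket from the AcquireGenericServiceTicket call just before it. A rough equivalent using requests; the cookies dict and ticket plumbing are placeholders, since the exact cookie name depends on the auth mode in use:

    import requests

    def put_image_to_datastore(esx_host, ds_name, rel_path, src_file, cookies):
        # Counterpart of the logged write handle: PUT the image file at
        # https://<host>:443/folder/<path>?dcPath=...&dsName=<datastore>
        url = ('https://%s:443/folder/%s?dcPath=ha-datacenter&dsName=%s'
               % (esx_host, rel_path, ds_name))
        with open(src_file, 'rb') as f:
            resp = requests.put(url, data=f, cookies=cookies, verify=False)
        resp.raise_for_status()

    # verify=False mirrors this lab deployment; a production caller
    # would pin the vCenter/ESX CA bundle instead.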
{{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 589.511911] env[62740]: DEBUG oslo_vmware.rw_handles [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Completed reading data from the image iterator. {{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 589.512255] env[62740]: DEBUG oslo_vmware.rw_handles [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e6f0bcf4-49dd-4bf5-a75c-d3fe5842137c/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 589.536683] env[62740]: DEBUG nova.network.neutron [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Updating instance_info_cache with network_info: [{"id": "9b017a97-fb78-47ef-902a-023312c318b7", "address": "fa:16:3e:bb:7d:ef", "network": {"id": "304597cd-4bd9-403c-8f5b-990e2a1efabc", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.220", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "47f0062f3bf04910bbbb3502a2f3ff28", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b017a97-fb", "ovs_interfaceid": "9b017a97-fb78-47ef-902a-023312c318b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 589.538639] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Expecting reply to msg 69c006ece6264250907ceca88f136c07 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 589.562591] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 69c006ece6264250907ceca88f136c07 [ 589.563668] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Releasing lock "refresh_cache-61b01264-eb0f-410c-8b39-971b95d16bb9" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 589.563961] env[62740]: DEBUG nova.compute.manager [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 
tempest-ServerExternalEventsTest-886346695-project-member] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Instance network_info: |[{"id": "9b017a97-fb78-47ef-902a-023312c318b7", "address": "fa:16:3e:bb:7d:ef", "network": {"id": "304597cd-4bd9-403c-8f5b-990e2a1efabc", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.220", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "47f0062f3bf04910bbbb3502a2f3ff28", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b017a97-fb", "ovs_interfaceid": "9b017a97-fb78-47ef-902a-023312c318b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 589.564946] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bb:7d:ef', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '39ab9baf-90cd-4fe2-8d56-434f8210fc19', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9b017a97-fb78-47ef-902a-023312c318b7', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 589.574778] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Creating folder: Project (68a2e576021a4834a3317105f2a30763). Parent ref: group-v156037. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 589.575264] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-119bdf35-182c-47fa-9609-383e172317de {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.591229] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Created folder: Project (68a2e576021a4834a3317105f2a30763) in parent group-v156037. [ 589.591369] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Creating folder: Instances. Parent ref: group-v156041. 
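The CreateFolder calls above extend the inventory tree for a second project: a "Project (<tenant id>)" folder under the existing top-level OpenStack folder, then an Instances folder beneath it. Folder creation is racy across concurrent builds, so a helper of this shape is typically written to tolerate a DuplicateName fault. An illustrative sketch against an oslo.vmware session (our simplification, not Nova's exact vm_util.create_folder):

    from oslo_vmware import exceptions as vexc

    def ensure_folder(session, parent_ref, name):
        # Create a child folder under parent_ref; vCenter raises
        # DuplicateName if it already exists, which we treat as success.
        try:
            return session.invoke_api(session.vim, 'CreateFolder',
                                      parent_ref, name=name)
        except vexc.DuplicateName:
            # Already present; a real helper would look up and return
            # the existing folder ref here.
            return None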
{{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 589.593025] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b2482b22-2a81-4050-861b-11dafa84355b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.606599] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Created folder: Instances in parent group-v156041. [ 589.606599] env[62740]: DEBUG oslo.service.loopingcall [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 589.606599] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 589.606599] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f94305e7-4a61-42f9-9cab-60369b3eb8fe {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.628700] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 589.628700] env[62740]: value = "task-640040" [ 589.628700] env[62740]: _type = "Task" [ 589.628700] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.641632] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640040, 'name': CreateVM_Task} progress is 5%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.140026] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640040, 'name': CreateVM_Task, 'duration_secs': 0.306427} completed successfully. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.140651] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 590.141427] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 590.141710] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 590.142121] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 590.142466] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf771018-f13c-44fa-82ce-b1918f5fd054 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.148048] env[62740]: DEBUG oslo_vmware.api [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Waiting for the task: (returnval){ [ 590.148048] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52b3a032-2340-c2d6-21e4-2f742d931218" [ 590.148048] env[62740]: _type = "Task" [ 590.148048] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.156890] env[62740]: DEBUG oslo_vmware.api [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52b3a032-2340-c2d6-21e4-2f742d931218, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.664434] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 590.664852] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 590.665212] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 590.978947] env[62740]: DEBUG nova.compute.manager [req-bebc8ac2-d511-4776-8723-446f097da19f req-46e4fd45-7e66-4151-9a5e-6855535f3577 service nova] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Received event network-vif-plugged-89db75e9-e46e-4868-a03f-d0d171a5e2c1 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 590.978947] env[62740]: DEBUG oslo_concurrency.lockutils [req-bebc8ac2-d511-4776-8723-446f097da19f req-46e4fd45-7e66-4151-9a5e-6855535f3577 service nova] Acquiring lock "53f7fc38-a541-4843-883d-da7311445fe5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 590.979162] env[62740]: DEBUG oslo_concurrency.lockutils [req-bebc8ac2-d511-4776-8723-446f097da19f req-46e4fd45-7e66-4151-9a5e-6855535f3577 service nova] Lock "53f7fc38-a541-4843-883d-da7311445fe5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 590.982382] env[62740]: DEBUG oslo_concurrency.lockutils [req-bebc8ac2-d511-4776-8723-446f097da19f req-46e4fd45-7e66-4151-9a5e-6855535f3577 service nova] Lock "53f7fc38-a541-4843-883d-da7311445fe5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 590.982382] env[62740]: DEBUG nova.compute.manager [req-bebc8ac2-d511-4776-8723-446f097da19f req-46e4fd45-7e66-4151-9a5e-6855535f3577 service nova] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] No waiting events found dispatching network-vif-plugged-89db75e9-e46e-4868-a03f-d0d171a5e2c1 {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 590.982382] env[62740]: WARNING nova.compute.manager [req-bebc8ac2-d511-4776-8723-446f097da19f req-46e4fd45-7e66-4151-9a5e-6855535f3577 service nova] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Received unexpected event 
network-vif-plugged-89db75e9-e46e-4868-a03f-d0d171a5e2c1 for instance with vm_state building and task_state spawning. [ 591.362559] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Acquiring lock "b326be2c-43f2-4f04-9652-cec7e017288e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 591.362559] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Lock "b326be2c-43f2-4f04-9652-cec7e017288e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 591.362559] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Expecting reply to msg f03627515da14657957215988627262e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 591.382020] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f03627515da14657957215988627262e [ 591.382668] env[62740]: DEBUG nova.compute.manager [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Starting instance... 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 591.384616] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Expecting reply to msg 94415947871748db836e7c6043e9315f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 591.408164] env[62740]: DEBUG nova.compute.manager [req-8cb12a00-3eb7-4da7-9fe4-ba7980d8dc47 req-eade2556-0e2e-40f9-b43b-d6082d05105f service nova] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Received event network-vif-plugged-9b017a97-fb78-47ef-902a-023312c318b7 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 591.408401] env[62740]: DEBUG oslo_concurrency.lockutils [req-8cb12a00-3eb7-4da7-9fe4-ba7980d8dc47 req-eade2556-0e2e-40f9-b43b-d6082d05105f service nova] Acquiring lock "61b01264-eb0f-410c-8b39-971b95d16bb9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 591.408770] env[62740]: DEBUG oslo_concurrency.lockutils [req-8cb12a00-3eb7-4da7-9fe4-ba7980d8dc47 req-eade2556-0e2e-40f9-b43b-d6082d05105f service nova] Lock "61b01264-eb0f-410c-8b39-971b95d16bb9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 591.409730] env[62740]: DEBUG oslo_concurrency.lockutils [req-8cb12a00-3eb7-4da7-9fe4-ba7980d8dc47 req-eade2556-0e2e-40f9-b43b-d6082d05105f service nova] Lock "61b01264-eb0f-410c-8b39-971b95d16bb9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 591.409827] env[62740]: DEBUG nova.compute.manager [req-8cb12a00-3eb7-4da7-9fe4-ba7980d8dc47 req-eade2556-0e2e-40f9-b43b-d6082d05105f service nova] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] No waiting events found dispatching network-vif-plugged-9b017a97-fb78-47ef-902a-023312c318b7 {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 591.410017] env[62740]: WARNING nova.compute.manager [req-8cb12a00-3eb7-4da7-9fe4-ba7980d8dc47 req-eade2556-0e2e-40f9-b43b-d6082d05105f service nova] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Received unexpected event network-vif-plugged-9b017a97-fb78-47ef-902a-023312c318b7 for instance with vm_state building and task_state spawning. 
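Both "Received unexpected event" WARNINGs above are the benign side of the external-event handshake: Neutron delivered network-vif-plugged before the driver registered a waiter for it, so pop_instance_event found nothing under the "<uuid>-events" lock and dropped the event while the instance was still building/spawning. The mechanism boils down to a lock-protected map of named events per instance. A stripped-down sketch (class and method names are ours, not Nova's):

    import threading
    from collections import defaultdict

    class InstanceEvents:
        def __init__(self):
            self._events = defaultdict(dict)  # instance uuid -> {event name: Event}
            self._lock = threading.Lock()     # the "<uuid>-events" lock in the log

        def prepare(self, instance_uuid, name):
            # Driver side: register interest before triggering the plug,
            # then block on the returned event with a timeout.
            ev = threading.Event()
            with self._lock:
                self._events[instance_uuid][name] = ev
            return ev

        def dispatch(self, instance_uuid, name):
            # API side: deliver an event reported by Neutron.
            with self._lock:
                ev = self._events[instance_uuid].pop(name, None)
            if ev is None:
                return False  # no waiter registered yet: the WARNING above
            ev.set()
            return True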
[ 591.445371] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 94415947871748db836e7c6043e9315f [ 591.469680] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 591.469832] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 591.473094] env[62740]: INFO nova.compute.claims [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 591.474814] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Expecting reply to msg 7c17179348b74b8da04aba5e5620a9fd in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 591.514161] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c17179348b74b8da04aba5e5620a9fd [ 591.515889] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Expecting reply to msg 1d9c0a1d692247ec889aeba4ce80c6a1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 591.526207] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1d9c0a1d692247ec889aeba4ce80c6a1 [ 591.600257] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5de8862-2019-4bc8-93cf-9a0e59ea89ee {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.608906] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7af4534-2d16-4170-b0db-4f6665413a65 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.642201] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-092dcf2f-81aa-42b2-98d2-79db0ff39f7f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.649838] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94ba86a6-21c1-4693-a223-2a9c6eaa0d50 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.666306] env[62740]: DEBUG nova.compute.provider_tree [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Inventory has not changed in ProviderTree for provider: 
d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 591.666819] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Expecting reply to msg 909de186c4fd48b1a05aa515fbb35265 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 591.677533] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 909de186c4fd48b1a05aa515fbb35265 [ 591.678551] env[62740]: DEBUG nova.scheduler.client.report [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 591.680977] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Expecting reply to msg 3380d693b2df4a68b371bfcdc0ca67c4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 591.697504] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3380d693b2df4a68b371bfcdc0ca67c4 [ 591.698537] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.228s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 591.698869] env[62740]: DEBUG nova.compute.manager [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Start building networks asynchronously for instance. 
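The inventory dictionary above is what bounded the claim that just succeeded: for each resource class, the capacity Placement allocates from is (total - reserved) * allocation_ratio, and no single allocation may exceed max_unit. Working the logged numbers through that formula:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0,
                      'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0,
                      'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0,
                      'max_unit': 90},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print('%s: capacity %.0f, at most %d per allocation'
              % (rc, capacity, inv['max_unit']))

    # VCPU: (48 - 0) * 4.0 = 192; MEMORY_MB: 196078; DISK_GB: 400.
    # An m1.nano claim (1 VCPU, 128 MB, 1 GB root disk) fits easily,
    # consistent with the quick claim recorded above.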
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 591.701083] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Expecting reply to msg 25358700cac946a98e3e9fb7fee6531d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 591.752801] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 25358700cac946a98e3e9fb7fee6531d [ 591.754133] env[62740]: DEBUG nova.compute.utils [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 591.754721] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Expecting reply to msg b5a4d7850ecd429385526cf92e03162a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 591.759674] env[62740]: DEBUG nova.compute.manager [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 591.759776] env[62740]: DEBUG nova.network.neutron [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 591.774322] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b5a4d7850ecd429385526cf92e03162a [ 591.775271] env[62740]: DEBUG nova.compute.manager [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 591.778171] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Expecting reply to msg 7deed93d476b4f0b8bcc45e2494cbf43 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 591.814015] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7deed93d476b4f0b8bcc45e2494cbf43 [ 591.814685] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Expecting reply to msg 6698e5c0cf784c78b7b39d1abb1ee3c9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 591.857543] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6698e5c0cf784c78b7b39d1abb1ee3c9 [ 591.859199] env[62740]: DEBUG nova.compute.manager [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Start spawning the instance on the hypervisor. {{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 591.900766] env[62740]: DEBUG nova.virt.hardware [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 591.901181] env[62740]: DEBUG nova.virt.hardware [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 591.901448] env[62740]: DEBUG nova.virt.hardware [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 591.901735] env[62740]: DEBUG nova.virt.hardware [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 591.901837] env[62740]: DEBUG nova.virt.hardware [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] 
Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 591.901989] env[62740]: DEBUG nova.virt.hardware [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 591.902219] env[62740]: DEBUG nova.virt.hardware [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 591.902381] env[62740]: DEBUG nova.virt.hardware [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 591.902552] env[62740]: DEBUG nova.virt.hardware [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 591.902779] env[62740]: DEBUG nova.virt.hardware [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 591.902982] env[62740]: DEBUG nova.virt.hardware [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 591.904632] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ec0891d-853e-48f7-b773-e03741d0d0f1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.913814] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f173c73-a718-41ac-87be-6b8acf144e3c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.031852] env[62740]: DEBUG nova.policy [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7b4ec1fd49944a2d8d4605e5c95d935c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c12d94f17bff48b48f03a4cf2c692c3a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 592.894298] env[62740]: 
DEBUG nova.network.neutron [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Successfully updated port: 136a594e-2108-4bbf-9bd6-f36d7263aedb {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 592.894298] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Expecting reply to msg ce9fe26f4aba40f4a1e3d100df89965e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 592.907544] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ce9fe26f4aba40f4a1e3d100df89965e [ 592.908678] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Acquiring lock "refresh_cache-9d175573-2af2-4f66-98cd-411d10f749f0" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 592.909429] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Acquired lock "refresh_cache-9d175573-2af2-4f66-98cd-411d10f749f0" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.909429] env[62740]: DEBUG nova.network.neutron [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 592.909429] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Expecting reply to msg edbc2b2d0a184ad2a73c845f143b482b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 592.918998] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg edbc2b2d0a184ad2a73c845f143b482b [ 593.081580] env[62740]: DEBUG nova.network.neutron [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Instance cache missing network info. 
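An aside on the lockutils records just above: the Acquiring/Acquired/Releasing triplet with waited/held timings is the standard oslo.concurrency named-lock pattern. A minimal sketch, assuming only that oslo.concurrency is installed; the lock name mirrors the log and the body is a placeholder, not Nova's actual cache-refresh code:

```python
# Sketch of the oslo.concurrency pattern behind the
# 'Acquiring lock "refresh_cache-<uuid>"' records above.
from oslo_concurrency import lockutils

instance_uuid = "9d175573-2af2-4f66-98cd-411d10f749f0"

# lockutils.lock() is a context manager; entering logs "Acquired lock",
# exiting logs "Releasing lock", with the waited/held timings seen here.
with lockutils.lock(f"refresh_cache-{instance_uuid}"):
    # Inside the block the per-instance network info cache can be rebuilt
    # without racing other workers in the same process.
    network_info = []  # placeholder for the Neutron-built VIF list
```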
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 593.625590] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Acquiring lock "fece072a-baac-4301-988c-0068d6e71cff" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 593.625774] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Lock "fece072a-baac-4301-988c-0068d6e71cff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.626305] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg f1d9bf265d484bc8bf7b56e1a141e1b6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 593.641323] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f1d9bf265d484bc8bf7b56e1a141e1b6 [ 593.641323] env[62740]: DEBUG nova.compute.manager [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 593.641722] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg fdb4b0d62b784edbad82eade1df77f6b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 593.689425] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fdb4b0d62b784edbad82eade1df77f6b [ 593.714396] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 593.714638] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.716208] env[62740]: INFO nova.compute.claims [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 593.717945] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8
tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg 3c12ac70977b4885887db971fca689a3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 593.775649] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c12ac70977b4885887db971fca689a3 [ 593.778339] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg bf6c4f56d02242d6bb8b4e80769fc3c4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 593.795613] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bf6c4f56d02242d6bb8b4e80769fc3c4 [ 593.800402] env[62740]: DEBUG nova.network.neutron [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Successfully created port: 78c7aabd-abfc-4814-8f7d-90ad8943739f {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 593.895090] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da7d5833-7a71-4646-abf2-6117eaa3af1e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.903333] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d7ce178-fa59-42d8-accf-3ed8f85f9498 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.936981] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9e4cb37-bd32-4ed3-ae2d-a8e03c836a78 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.947146] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15f57aac-85bb-46b5-8af7-0f78571defac {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.961462] env[62740]: DEBUG nova.compute.provider_tree [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 593.962013] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg 21a73ad5661440ca8f059641f7cfff40 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 593.974792] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 21a73ad5661440ca8f059641f7cfff40 [ 593.976069] env[62740]: DEBUG nova.scheduler.client.report [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 593.979808] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg b8f8a1ca1832404982571efc135447af in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 593.997698] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b8f8a1ca1832404982571efc135447af [ 593.998610] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.284s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 593.999096] env[62740]: DEBUG nova.compute.manager [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Start building networks asynchronously for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 594.001972] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg 6acceb94b3d347ab958b28c85f0dd358 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 594.017519] env[62740]: DEBUG nova.network.neutron [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Updating instance_info_cache with network_info: [{"id": "136a594e-2108-4bbf-9bd6-f36d7263aedb", "address": "fa:16:3e:33:4a:20", "network": {"id": "c706a27c-b24b-4f31-9930-eff5254266f2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-41588659-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8f8758e87377404eb222264dba749e83", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "796c7fcb-00fd-4692-a44b-7ec550201e86", "external-id": "nsx-vlan-transportzone-42", "segmentation_id": 42, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap136a594e-21", "ovs_interfaceid": "136a594e-2108-4bbf-9bd6-f36d7263aedb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 594.018908] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 
tempest-ServerActionsTestOtherB-1591768249-project-member] Expecting reply to msg ce987de4010040a3b4cd26ab4e264005 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 594.036687] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ce987de4010040a3b4cd26ab4e264005 [ 594.038256] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Releasing lock "refresh_cache-9d175573-2af2-4f66-98cd-411d10f749f0" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 594.038460] env[62740]: DEBUG nova.compute.manager [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Instance network_info: |[{"id": "136a594e-2108-4bbf-9bd6-f36d7263aedb", "address": "fa:16:3e:33:4a:20", "network": {"id": "c706a27c-b24b-4f31-9930-eff5254266f2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-41588659-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8f8758e87377404eb222264dba749e83", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "796c7fcb-00fd-4692-a44b-7ec550201e86", "external-id": "nsx-vlan-transportzone-42", "segmentation_id": 42, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap136a594e-21", "ovs_interfaceid": "136a594e-2108-4bbf-9bd6-f36d7263aedb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 594.039418] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:33:4a:20', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '796c7fcb-00fd-4692-a44b-7ec550201e86', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '136a594e-2108-4bbf-9bd6-f36d7263aedb', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 594.050780] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Creating folder: Project (8f8758e87377404eb222264dba749e83). Parent ref: group-v156037. 
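The network_info blob repeated above is plain nested data (a list of VIF dicts), so it can be walked directly. A hedged sketch, with the literal trimmed down from the log entry for instance 9d175573; this is illustrative data handling, not a Nova or Neutron API call:

```python
# Pull each VIF's port id, MAC, and fixed IPs out of a network_info
# structure shaped like the one logged above.
network_info = [{
    "id": "136a594e-2108-4bbf-9bd6-f36d7263aedb",
    "address": "fa:16:3e:33:4a:20",
    "network": {
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.6", "type": "fixed"}],
        }],
    },
}]

for vif in network_info:
    fixed_ips = [
        ip["address"]
        for subnet in vif["network"]["subnets"]
        for ip in subnet["ips"]
        if ip["type"] == "fixed"
    ]
    # -> 136a594e-2108-4bbf-9bd6-f36d7263aedb fa:16:3e:33:4a:20 ['192.168.128.6']
    print(vif["id"], vif["address"], fixed_ips)
```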
{{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 594.051929] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6c2fd7cc-4b89-4a28-a072-cbd82fa2720e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.067026] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Created folder: Project (8f8758e87377404eb222264dba749e83) in parent group-v156037. [ 594.067026] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Creating folder: Instances. Parent ref: group-v156044. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 594.067026] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-24657be1-4735-4baa-bdcc-11b5f405c238 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.077063] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Created folder: Instances in parent group-v156044. [ 594.077409] env[62740]: DEBUG oslo.service.loopingcall [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 594.077653] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 594.077936] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-56265066-dae0-4f31-a729-e320103c39e6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.097845] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6acceb94b3d347ab958b28c85f0dd358 [ 594.099474] env[62740]: DEBUG nova.compute.utils [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 594.100636] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg b4de92dc32d74edfb02b396756d4e26c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 594.102314] env[62740]: DEBUG nova.compute.manager [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Allocating IP information in the background. 
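The "Using /dev/sd instead of None" record above comes from device-name defaulting during block device mapping: when no device name is requested, a /dev/sd-prefixed name is picked. A simplified stand-in for that behavior, assuming only the next-free-letter semantics implied by the log; it is not nova.compute.utils.get_next_device_name itself:

```python
# Hypothetical, simplified next-device-name picker.
import string

def next_device_name(used, prefix="/dev/sd"):
    # Walk sda, sdb, ... and return the first name not already in use.
    for letter in string.ascii_lowercase:
        candidate = prefix + letter
        if candidate not in used:
            return candidate
    raise ValueError("no free device names under " + prefix)

print(next_device_name({"/dev/sda"}))  # -> /dev/sdb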
{{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 594.102550] env[62740]: DEBUG nova.network.neutron [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 594.107495] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 594.107495] env[62740]: value = "task-640043" [ 594.107495] env[62740]: _type = "Task" [ 594.107495] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.118070] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640043, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.123929] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4de92dc32d74edfb02b396756d4e26c [ 594.124627] env[62740]: DEBUG nova.compute.manager [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Start building block device mappings for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 594.126330] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg 01d30c2fc0cb4137bbc4979ab88953e9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 594.192838] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 01d30c2fc0cb4137bbc4979ab88953e9 [ 594.195914] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg c0b85eebc8dd4271b0ae4d84e3ebac94 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 594.231250] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c0b85eebc8dd4271b0ae4d84e3ebac94 [ 594.232409] env[62740]: DEBUG nova.compute.manager [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Start spawning the instance on the hypervisor. 
{{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 594.261867] env[62740]: DEBUG nova.virt.hardware [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 594.262131] env[62740]: DEBUG nova.virt.hardware [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 594.262329] env[62740]: DEBUG nova.virt.hardware [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 594.262559] env[62740]: DEBUG nova.virt.hardware [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 594.262713] env[62740]: DEBUG nova.virt.hardware [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 594.262863] env[62740]: DEBUG nova.virt.hardware [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 594.267091] env[62740]: DEBUG nova.virt.hardware [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 594.267091] env[62740]: DEBUG nova.virt.hardware [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 594.267091] env[62740]: DEBUG nova.virt.hardware [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 
tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 594.267091] env[62740]: DEBUG nova.virt.hardware [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 594.267091] env[62740]: DEBUG nova.virt.hardware [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 594.267926] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f61a5b5e-6f80-4001-8692-e20037ae275a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.278189] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-297ce214-9d2c-4191-b6dd-82139a767d44 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.480226] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Acquiring lock "3102cc87-df1a-4de8-bfdb-9b904f40ea2e" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 594.480545] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Lock "3102cc87-df1a-4de8-bfdb-9b904f40ea2e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 594.481060] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Expecting reply to msg 57a88ba17d1b4853857ef54bcc4e907d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 594.493218] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 57a88ba17d1b4853857ef54bcc4e907d [ 594.493996] env[62740]: DEBUG nova.compute.manager [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Starting instance...
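The nova.virt.hardware records above ("Build topologies for 1 vcpu(s) 1:1:1" through "Sorted desired topologies") describe a search over CPU topologies. A simplified sketch of the combinatorial core under the defaults shown (limits 65536:65536:65536); Nova's real hardware.py logic additionally handles preferences and NUMA, so treat this only as an illustration:

```python
# Enumerate (sockets, cores, threads) triples whose product equals the
# flavor's vCPU count, subject to the per-dimension maxima.
from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    limit = min(vcpus, 64)  # a small search bound suffices for tiny flavors
    return [
        (s, c, t)
        for s, c, t in product(range(1, limit + 1), repeat=3)
        if s * c * t == vcpus
        and s <= max_sockets and c <= max_cores and t <= max_threads
    ]

# For the 1-vCPU m1.nano flavor this yields a single topology,
# matching "Got 1 possible topologies" in the log.
print(possible_topologies(1))  # -> [(1, 1, 1)]
```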
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 594.495789] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Expecting reply to msg 78d1976f8c9d4d599b64ea7466447ddb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 594.548961] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 78d1976f8c9d4d599b64ea7466447ddb [ 594.565517] env[62740]: DEBUG nova.policy [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8a9c404870f54f2e8d4d7c7b0df334f2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f38588bc783140a38d77f8967add27d0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 594.572883] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 594.573286] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 594.574858] env[62740]: INFO nova.compute.claims [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 594.576508] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Expecting reply to msg 6148b488cf324fb6be3c8ebca27b1a45 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 594.625383] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640043, 'name': CreateVM_Task, 'duration_secs': 0.356251} completed successfully. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.625383] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 594.631293] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 594.631293] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 594.631293] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 594.631293] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74f20677-166a-4302-b229-ce52ca12a1fa {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.632791] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6148b488cf324fb6be3c8ebca27b1a45 [ 594.635025] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Expecting reply to msg 2402c7803a2d4489a359f755b8e53177 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 594.641019] env[62740]: DEBUG oslo_vmware.api [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Waiting for the task: (returnval){ [ 594.641019] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52df2899-d28c-510b-77a4-f6371aef0c41" [ 594.641019] env[62740]: _type = "Task" [ 594.641019] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.652160] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2402c7803a2d4489a359f755b8e53177 [ 594.653343] env[62740]: DEBUG oslo_vmware.api [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52df2899-d28c-510b-77a4-f6371aef0c41, 'name': SearchDatastore_Task} progress is 0%. 
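The CreateVM_Task and SearchDatastore_Task records above ("Waiting for the task", "progress is 0%", "completed successfully ... duration_secs") show oslo.vmware's task polling. A generic poll loop in the same spirit; fetch_task_info is a hypothetical callable standing in for a vSphere property read, not a real oslo.vmware API:

```python
# Generic poll-until-done loop mirroring the wait_for_task/_poll_task
# behavior visible in the log.
import time

def wait_for_task(fetch_task_info, interval=0.5):
    while True:
        info = fetch_task_info()  # e.g. {"state": "running", "progress": 40}
        if info["state"] == "success":
            return info  # caller can read duration, result, etc.
        if info["state"] == "error":
            raise RuntimeError("task failed: %s" % info.get("error"))
        print("progress is %s%%" % info.get("progress", 0))
        time.sleep(interval)
```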
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.768594] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9de91d73-5dc5-4835-81d3-3a4d57fdb87c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.777857] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3c6aac7-526e-4bcb-bd67-618d3a996c60 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.814288] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4da707a-fd03-4cd8-a3e1-bb24065737cb {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.822402] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee448747-de3b-4b27-97b9-dd5398dddea7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.837521] env[62740]: DEBUG nova.compute.provider_tree [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 594.837798] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Expecting reply to msg 1d3db1e9b8f949b6b60186598c8d32f0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 594.851224] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1d3db1e9b8f949b6b60186598c8d32f0 [ 594.852350] env[62740]: DEBUG nova.scheduler.client.report [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 594.855225] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Expecting reply to msg 3d51adb2711d47b4899135aece8b9196 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 594.870413] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3d51adb2711d47b4899135aece8b9196 [ 594.871249] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.298s {{(pid=62740) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 594.871744] env[62740]: DEBUG nova.compute.manager [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Start building networks asynchronously for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 594.873781] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Expecting reply to msg 4d2dfd43876f43bdaeab6b2619c528c8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 594.914481] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4d2dfd43876f43bdaeab6b2619c528c8 [ 594.915859] env[62740]: DEBUG nova.compute.utils [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 594.916856] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Expecting reply to msg 84fad52c21f54e09985bf5c53b835a53 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 594.918889] env[62740]: DEBUG nova.compute.manager [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 594.918889] env[62740]: DEBUG nova.network.neutron [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 594.932954] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 84fad52c21f54e09985bf5c53b835a53 [ 594.933586] env[62740]: DEBUG nova.compute.manager [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Start building block device mappings for instance. 
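The provider inventory repeated in the scheduler.client.report records above maps to schedulable capacity via the usual Placement arithmetic, effective = (total - reserved) * allocation_ratio. A worked example using the exact numbers from the log:

```python
# Provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0's inventory, as logged.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    effective = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(rc, effective)
# VCPU 192.0        (48 physical cores oversubscribed 4x)
# MEMORY_MB 196078.0
# DISK_GB 400.0
```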
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 594.935690] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Expecting reply to msg ca5198138cd44949a5b610c9eaaf8401 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 594.976165] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ca5198138cd44949a5b610c9eaaf8401 [ 594.978941] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Expecting reply to msg 20f05e220e484ad3849d3499aae384d7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 595.019724] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 20f05e220e484ad3849d3499aae384d7 [ 595.021297] env[62740]: DEBUG nova.compute.manager [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Start spawning the instance on the hypervisor. {{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 595.061090] env[62740]: DEBUG nova.virt.hardware [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 595.061276] env[62740]: DEBUG nova.virt.hardware [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 595.061626] env[62740]: DEBUG nova.virt.hardware [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 595.062357] env[62740]: DEBUG nova.virt.hardware [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 595.062357] env[62740]: DEBUG nova.virt.hardware [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Image pref 0:0:0 
{{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 595.062357] env[62740]: DEBUG nova.virt.hardware [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 595.062357] env[62740]: DEBUG nova.virt.hardware [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 595.062653] env[62740]: DEBUG nova.virt.hardware [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 595.062653] env[62740]: DEBUG nova.virt.hardware [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 595.065052] env[62740]: DEBUG nova.virt.hardware [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 595.065052] env[62740]: DEBUG nova.virt.hardware [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 595.069638] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12ae29f0-e0f7-4af7-9c15-1f1329fe280a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.079311] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc6a312f-0347-4134-8708-a194244416a8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.162091] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 595.162091] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 595.162091] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 595.323798] env[62740]: DEBUG nova.policy [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ea754c84564745df9f03a408a370aa0f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '965194a3db2448a5a8d75b43a108718d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 595.832686] env[62740]: DEBUG oslo_concurrency.lockutils [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Acquiring lock "4f0d1356-bdfb-4cb2-979a-e28f9025b311" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 595.832955] env[62740]: DEBUG oslo_concurrency.lockutils [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Lock "4f0d1356-bdfb-4cb2-979a-e28f9025b311" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 595.833384] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Expecting reply to msg e20c7aff572d4e919ef3b4df06a9c722 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 595.845274] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e20c7aff572d4e919ef3b4df06a9c722 [ 595.845798] env[62740]: DEBUG nova.compute.manager [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Starting instance...
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 595.847880] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Expecting reply to msg 0c2afed9439643bc98e1d3e84e26c3b8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 595.891135] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c2afed9439643bc98e1d3e84e26c3b8 [ 595.910742] env[62740]: DEBUG oslo_concurrency.lockutils [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 595.911014] env[62740]: DEBUG oslo_concurrency.lockutils [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 595.912849] env[62740]: INFO nova.compute.claims [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 595.915035] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Expecting reply to msg b3e19768153545d3afd00c15f8e1d275 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 595.965683] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b3e19768153545d3afd00c15f8e1d275 [ 595.968092] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Expecting reply to msg a6bb1eae5f8542839c52739559e84d07 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 595.983997] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a6bb1eae5f8542839c52739559e84d07 [ 596.158444] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68febc56-1216-45d7-9a5d-04bb1efce5ad {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.168564] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72a35592-c4de-401a-a39a-8c5a9cf45156 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.210434] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09886fac-b0c0-4dfa-98ea-17973fc45703 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.222343] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-49866c4f-c1d5-448b-9683-bbb46e9b9821 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.236244] env[62740]: DEBUG nova.compute.provider_tree [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 596.236772] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Expecting reply to msg 68291bac3c3b41bc90534a9509694e2e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 596.250105] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 68291bac3c3b41bc90534a9509694e2e [ 596.251182] env[62740]: DEBUG nova.scheduler.client.report [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 596.253754] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Expecting reply to msg 50136f1ad9e7443ab089a86e77773eda in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 596.266951] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 50136f1ad9e7443ab089a86e77773eda [ 596.267820] env[62740]: DEBUG oslo_concurrency.lockutils [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.357s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 596.268370] env[62740]: DEBUG nova.compute.manager [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Start building networks asynchronously for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 596.271121] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Expecting reply to msg f24ea868bbb844f7b94caf018e96ea51 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 596.325907] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f24ea868bbb844f7b94caf018e96ea51 [ 596.327397] env[62740]: DEBUG nova.compute.utils [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 596.329767] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Expecting reply to msg 18decdfa7cc944dd902674ad6d77de14 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 596.333922] env[62740]: DEBUG nova.compute.manager [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 596.335073] env[62740]: DEBUG nova.network.neutron [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 596.347489] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 18decdfa7cc944dd902674ad6d77de14 [ 596.347984] env[62740]: DEBUG nova.compute.manager [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 596.350155] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Expecting reply to msg 9f702f4079b542f887a1f307313cbafb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 596.387042] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f702f4079b542f887a1f307313cbafb [ 596.389843] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Expecting reply to msg 575bff72b833476a975fefd45e211fbe in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 596.424845] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 575bff72b833476a975fefd45e211fbe [ 596.426135] env[62740]: DEBUG nova.compute.manager [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Start spawning the instance on the hypervisor. {{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 596.460690] env[62740]: DEBUG nova.virt.hardware [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 596.460938] env[62740]: DEBUG nova.virt.hardware [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 596.461172] env[62740]: DEBUG nova.virt.hardware [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 596.461369] env[62740]: DEBUG nova.virt.hardware [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 596.461513] env[62740]: DEBUG nova.virt.hardware [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 
tempest-AttachInterfacesV270Test-942908702-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 596.461654] env[62740]: DEBUG nova.virt.hardware [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 596.461857] env[62740]: DEBUG nova.virt.hardware [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 596.462029] env[62740]: DEBUG nova.virt.hardware [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 596.462200] env[62740]: DEBUG nova.virt.hardware [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 596.462361] env[62740]: DEBUG nova.virt.hardware [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 596.462526] env[62740]: DEBUG nova.virt.hardware [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 596.463730] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a123ad41-0794-40ba-b7aa-15cbfbb7ef1a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.472117] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80b921bb-7dda-4ca0-9ad7-e1e4c88bb18e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.618130] env[62740]: DEBUG nova.policy [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7ac78c054f2e4f278720392d9897a5e8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a29640051052481dace61e0c2197b229', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) 
authorize /opt/stack/nova/nova/policy.py:203}} [ 596.621747] env[62740]: DEBUG nova.network.neutron [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Successfully created port: 5e56cddb-165b-4421-8e11-e60730241229 {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 596.952448] env[62740]: DEBUG nova.network.neutron [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Successfully updated port: 78c7aabd-abfc-4814-8f7d-90ad8943739f {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 596.953273] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Expecting reply to msg 8121a859338c4c39827071da45c1904e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 596.966748] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8121a859338c4c39827071da45c1904e [ 596.967734] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Acquiring lock "refresh_cache-b326be2c-43f2-4f04-9652-cec7e017288e" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 596.967734] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Acquired lock "refresh_cache-b326be2c-43f2-4f04-9652-cec7e017288e" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.967861] env[62740]: DEBUG nova.network.neutron [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 596.968220] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Expecting reply to msg 47075675356f45209570262f8d5b21b5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 596.980947] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 47075675356f45209570262f8d5b21b5 [ 597.141755] env[62740]: DEBUG nova.network.neutron [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 597.576792] env[62740]: DEBUG nova.network.neutron [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Successfully created port: 3d0464f5-8822-416b-b56f-51afc395017f {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 598.073411] env[62740]: DEBUG nova.network.neutron [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Updating instance_info_cache with network_info: [{"id": "78c7aabd-abfc-4814-8f7d-90ad8943739f", "address": "fa:16:3e:49:78:c3", "network": {"id": "c1ab5dd8-1f3a-4091-af45-4bce90893657", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1720290614-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c12d94f17bff48b48f03a4cf2c692c3a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78c7aabd-ab", "ovs_interfaceid": "78c7aabd-abfc-4814-8f7d-90ad8943739f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.074039] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Expecting reply to msg 68e3175c0a94405180a5cf8c23048b81 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 598.093493] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 68e3175c0a94405180a5cf8c23048b81 [ 598.094196] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Releasing lock "refresh_cache-b326be2c-43f2-4f04-9652-cec7e017288e" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 598.094541] env[62740]: DEBUG nova.compute.manager [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Instance network_info: |[{"id": "78c7aabd-abfc-4814-8f7d-90ad8943739f", "address": "fa:16:3e:49:78:c3", "network": {"id": "c1ab5dd8-1f3a-4091-af45-4bce90893657", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1720290614-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", 
"type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c12d94f17bff48b48f03a4cf2c692c3a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78c7aabd-ab", "ovs_interfaceid": "78c7aabd-abfc-4814-8f7d-90ad8943739f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 598.094968] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:49:78:c3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '35ac9709-fd8b-4630-897a-68ed629d1b11', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '78c7aabd-abfc-4814-8f7d-90ad8943739f', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 598.107528] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Creating folder: Project (c12d94f17bff48b48f03a4cf2c692c3a). Parent ref: group-v156037. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 598.108696] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-83ddbf55-5a37-4353-91c1-79460a45b2b0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.119616] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Created folder: Project (c12d94f17bff48b48f03a4cf2c692c3a) in parent group-v156037. [ 598.120043] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Creating folder: Instances. Parent ref: group-v156047. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 598.120296] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cddeb515-8d0a-4e1a-a686-c699b4be8f97 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.128996] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Created folder: Instances in parent group-v156047. 
[ 598.129366] env[62740]: DEBUG oslo.service.loopingcall [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 598.130073] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 598.130073] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-22592c95-2d34-46f1-9a9e-4f614d1a5a1e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 598.151064] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 598.151064] env[62740]: value = "task-640046"
[ 598.151064] env[62740]: _type = "Task"
[ 598.151064] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 598.158792] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640046, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 598.289795] env[62740]: DEBUG nova.network.neutron [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Successfully created port: a8ea17f3-420a-465c-9b70-9b279603a92f {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 598.661375] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640046, 'name': CreateVM_Task, 'duration_secs': 0.314436} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
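Editor's note: the CreateVM_Task exchange above is oslo.vmware's invoke-then-poll pattern. The SOAP call returns a task reference (task-640046) immediately, and wait_for_task polls its TaskInfo at the session's poll interval (the "progress is 0%." record) until it reports success, here after 0.314436s. A rough sketch under assumed names; folder_ref, config_spec, and res_pool_ref stand in for the Instances folder, VirtualMachineConfigSpec, and resource pool the driver builds elsewhere.

    def create_vm(session, folder_ref, config_spec, res_pool_ref):
        # Returns a Task moref right away rather than blocking on the
        # actual VM creation.
        task_ref = session.invoke_api(session.vim, 'CreateVM_Task',
                                      folder_ref,
                                      config=config_spec,
                                      pool=res_pool_ref)
        # Polls TaskInfo until the task reaches 'success'; raises if
        # the task ends in an error state.
        task_info = session.wait_for_task(task_ref)
        return task_info.result  # moref of the newly created VM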
[ 598.661549] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 598.662232] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 598.662397] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 598.662700] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 598.662964] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88d497ab-6173-45b4-b4ad-9c911bc04508 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 598.667815] env[62740]: DEBUG oslo_vmware.api [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Waiting for the task: (returnval){
[ 598.667815] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5234dfec-50b4-12ce-64f5-a06e3f31adde"
[ 598.667815] env[62740]: _type = "Task"
[ 598.667815] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 598.677703] env[62740]: DEBUG oslo_vmware.api [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5234dfec-50b4-12ce-64f5-a06e3f31adde, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 598.851933] env[62740]: DEBUG nova.compute.manager [req-b69f1a9c-961a-4cf6-8865-05a9c8f04cbd req-6ed9b75d-38f5-4cea-bb96-961f1e13b7e8 service nova] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Received event network-changed-89db75e9-e46e-4868-a03f-d0d171a5e2c1 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}}
[ 598.851933] env[62740]: DEBUG nova.compute.manager [req-b69f1a9c-961a-4cf6-8865-05a9c8f04cbd req-6ed9b75d-38f5-4cea-bb96-961f1e13b7e8 service nova] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Refreshing instance network info cache due to event network-changed-89db75e9-e46e-4868-a03f-d0d171a5e2c1. {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}}
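Editor's note: two oslo.concurrency idioms are visible in this stretch of the log. Locks taken "by" a function (the compute_resources and build-and-run locks earlier) come from the synchronized decorator, while the image-cache lock just acquired uses the lock() context manager with external=True, which adds a file-based inter-process semaphore on top of the in-process one (the "Acquired external semaphore" record). A sketch of both forms with assumed names; external locks additionally require oslo.config's lock_path to be set, as it evidently is in this deployment.

    from oslo_concurrency import lockutils

    # Decorator form: emits 'Acquiring/Acquired lock "..." by "..."'.
    @lockutils.synchronized('compute_resources')
    def instance_claim(instance):
        ...

    # Context-manager form; external=True also takes a file lock so
    # that only one process works on a given cached image at a time.
    def process_cached_image(image_id):
        with lockutils.lock('[datastore2] devstack-image-cache_base/%s'
                            % image_id, external=True):
            ...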
[ 598.852230] env[62740]: DEBUG oslo_concurrency.lockutils [req-b69f1a9c-961a-4cf6-8865-05a9c8f04cbd req-6ed9b75d-38f5-4cea-bb96-961f1e13b7e8 service nova] Acquiring lock "refresh_cache-53f7fc38-a541-4843-883d-da7311445fe5" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 598.852230] env[62740]: DEBUG oslo_concurrency.lockutils [req-b69f1a9c-961a-4cf6-8865-05a9c8f04cbd req-6ed9b75d-38f5-4cea-bb96-961f1e13b7e8 service nova] Acquired lock "refresh_cache-53f7fc38-a541-4843-883d-da7311445fe5" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 598.852408] env[62740]: DEBUG nova.network.neutron [req-b69f1a9c-961a-4cf6-8865-05a9c8f04cbd req-6ed9b75d-38f5-4cea-bb96-961f1e13b7e8 service nova] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Refreshing network info cache for port 89db75e9-e46e-4868-a03f-d0d171a5e2c1 {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 598.852895] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-b69f1a9c-961a-4cf6-8865-05a9c8f04cbd req-6ed9b75d-38f5-4cea-bb96-961f1e13b7e8 service nova] Expecting reply to msg c080f876522b49289c77b69bc1cad738 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 598.864787] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c080f876522b49289c77b69bc1cad738
[ 599.179651] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 599.179945] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 599.180261] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 599.187885] env[62740]: DEBUG nova.compute.manager [req-f9db62eb-d4dc-41db-a02e-afe6bdee5358 req-ece10d45-3a88-4e3e-87b7-24a6096177f3 service nova] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Received event network-changed-9b017a97-fb78-47ef-902a-023312c318b7 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}}
[ 599.187885] env[62740]: DEBUG nova.compute.manager [req-f9db62eb-d4dc-41db-a02e-afe6bdee5358 req-ece10d45-3a88-4e3e-87b7-24a6096177f3 service nova] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Refreshing instance network info cache due to event network-changed-9b017a97-fb78-47ef-902a-023312c318b7. {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}}
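Editor's note: the "Received event network-changed-..." records here are Neutron port notifications delivered through Nova's os-server-external-events API; the compute manager either hands them to a waiter registered by the virt driver or, as above, simply refreshes the instance's network info cache. A hedged sketch of the waiting side using nova's virtapi interface; plug_and_wait and its arguments are illustrative, and 300 is the conventional deadline in seconds.

    def plug_and_wait(virtapi, instance, plug_vifs, port_id):
        # Register interest before starting the work that triggers the
        # event, so a notification that arrives quickly is not dropped;
        # exiting the context manager blocks until the event arrives or
        # the deadline expires.
        events = [('network-vif-plugged', port_id)]
        with virtapi.wait_for_instance_event(instance, events,
                                             deadline=300):
            plug_vifs(instance)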
[ 599.187885] env[62740]: DEBUG oslo_concurrency.lockutils [req-f9db62eb-d4dc-41db-a02e-afe6bdee5358 req-ece10d45-3a88-4e3e-87b7-24a6096177f3 service nova] Acquiring lock "refresh_cache-61b01264-eb0f-410c-8b39-971b95d16bb9" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 599.187885] env[62740]: DEBUG oslo_concurrency.lockutils [req-f9db62eb-d4dc-41db-a02e-afe6bdee5358 req-ece10d45-3a88-4e3e-87b7-24a6096177f3 service nova] Acquired lock "refresh_cache-61b01264-eb0f-410c-8b39-971b95d16bb9" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 599.187885] env[62740]: DEBUG nova.network.neutron [req-f9db62eb-d4dc-41db-a02e-afe6bdee5358 req-ece10d45-3a88-4e3e-87b7-24a6096177f3 service nova] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Refreshing network info cache for port 9b017a97-fb78-47ef-902a-023312c318b7 {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 599.188223] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-f9db62eb-d4dc-41db-a02e-afe6bdee5358 req-ece10d45-3a88-4e3e-87b7-24a6096177f3 service nova] Expecting reply to msg 8d27fcdbdd4d46efae4744a0d3215089 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 599.202958] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8d27fcdbdd4d46efae4744a0d3215089
[ 600.353494] env[62740]: DEBUG nova.network.neutron [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Successfully updated port: 3d0464f5-8822-416b-b56f-51afc395017f {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 600.353981] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Expecting reply to msg 8a6480c7fcad432cb0c98657940ce491 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 600.368048] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8a6480c7fcad432cb0c98657940ce491
[ 600.371181] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Acquiring lock "refresh_cache-3102cc87-df1a-4de8-bfdb-9b904f40ea2e" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 600.371181] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Acquired lock "refresh_cache-3102cc87-df1a-4de8-bfdb-9b904f40ea2e" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 600.371181] env[62740]: DEBUG nova.network.neutron [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 600.371181] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None
req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Expecting reply to msg cac027c3858c43a9bc842dd6703ccc33 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 600.380208] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cac027c3858c43a9bc842dd6703ccc33 [ 600.387242] env[62740]: DEBUG nova.network.neutron [req-f9db62eb-d4dc-41db-a02e-afe6bdee5358 req-ece10d45-3a88-4e3e-87b7-24a6096177f3 service nova] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Updated VIF entry in instance network info cache for port 9b017a97-fb78-47ef-902a-023312c318b7. {{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 600.387242] env[62740]: DEBUG nova.network.neutron [req-f9db62eb-d4dc-41db-a02e-afe6bdee5358 req-ece10d45-3a88-4e3e-87b7-24a6096177f3 service nova] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Updating instance_info_cache with network_info: [{"id": "9b017a97-fb78-47ef-902a-023312c318b7", "address": "fa:16:3e:bb:7d:ef", "network": {"id": "304597cd-4bd9-403c-8f5b-990e2a1efabc", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.220", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "47f0062f3bf04910bbbb3502a2f3ff28", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b017a97-fb", "ovs_interfaceid": "9b017a97-fb78-47ef-902a-023312c318b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 600.387363] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-f9db62eb-d4dc-41db-a02e-afe6bdee5358 req-ece10d45-3a88-4e3e-87b7-24a6096177f3 service nova] Expecting reply to msg 8785192e861a466d83cb17094d3db9bb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 600.396539] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8785192e861a466d83cb17094d3db9bb [ 600.397377] env[62740]: DEBUG oslo_concurrency.lockutils [req-f9db62eb-d4dc-41db-a02e-afe6bdee5358 req-ece10d45-3a88-4e3e-87b7-24a6096177f3 service nova] Releasing lock "refresh_cache-61b01264-eb0f-410c-8b39-971b95d16bb9" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 600.397831] env[62740]: DEBUG nova.compute.manager [req-f9db62eb-d4dc-41db-a02e-afe6bdee5358 req-ece10d45-3a88-4e3e-87b7-24a6096177f3 service nova] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Received event network-vif-plugged-136a594e-2108-4bbf-9bd6-f36d7263aedb {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 600.398508] env[62740]: DEBUG oslo_concurrency.lockutils [req-f9db62eb-d4dc-41db-a02e-afe6bdee5358 req-ece10d45-3a88-4e3e-87b7-24a6096177f3 service nova] Acquiring lock "9d175573-2af2-4f66-98cd-411d10f749f0-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 600.398508] env[62740]: DEBUG oslo_concurrency.lockutils [req-f9db62eb-d4dc-41db-a02e-afe6bdee5358 req-ece10d45-3a88-4e3e-87b7-24a6096177f3 service nova] Lock "9d175573-2af2-4f66-98cd-411d10f749f0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.398671] env[62740]: DEBUG oslo_concurrency.lockutils [req-f9db62eb-d4dc-41db-a02e-afe6bdee5358 req-ece10d45-3a88-4e3e-87b7-24a6096177f3 service nova] Lock "9d175573-2af2-4f66-98cd-411d10f749f0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 600.398875] env[62740]: DEBUG nova.compute.manager [req-f9db62eb-d4dc-41db-a02e-afe6bdee5358 req-ece10d45-3a88-4e3e-87b7-24a6096177f3 service nova] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] No waiting events found dispatching network-vif-plugged-136a594e-2108-4bbf-9bd6-f36d7263aedb {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 600.399118] env[62740]: WARNING nova.compute.manager [req-f9db62eb-d4dc-41db-a02e-afe6bdee5358 req-ece10d45-3a88-4e3e-87b7-24a6096177f3 service nova] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Received unexpected event network-vif-plugged-136a594e-2108-4bbf-9bd6-f36d7263aedb for instance with vm_state building and task_state spawning. [ 600.399405] env[62740]: DEBUG nova.compute.manager [req-f9db62eb-d4dc-41db-a02e-afe6bdee5358 req-ece10d45-3a88-4e3e-87b7-24a6096177f3 service nova] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Received event network-changed-136a594e-2108-4bbf-9bd6-f36d7263aedb {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 600.399613] env[62740]: DEBUG nova.compute.manager [req-f9db62eb-d4dc-41db-a02e-afe6bdee5358 req-ece10d45-3a88-4e3e-87b7-24a6096177f3 service nova] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Refreshing instance network info cache due to event network-changed-136a594e-2108-4bbf-9bd6-f36d7263aedb. 
{{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 600.399837] env[62740]: DEBUG oslo_concurrency.lockutils [req-f9db62eb-d4dc-41db-a02e-afe6bdee5358 req-ece10d45-3a88-4e3e-87b7-24a6096177f3 service nova] Acquiring lock "refresh_cache-9d175573-2af2-4f66-98cd-411d10f749f0" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 600.400020] env[62740]: DEBUG oslo_concurrency.lockutils [req-f9db62eb-d4dc-41db-a02e-afe6bdee5358 req-ece10d45-3a88-4e3e-87b7-24a6096177f3 service nova] Acquired lock "refresh_cache-9d175573-2af2-4f66-98cd-411d10f749f0" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.400215] env[62740]: DEBUG nova.network.neutron [req-f9db62eb-d4dc-41db-a02e-afe6bdee5358 req-ece10d45-3a88-4e3e-87b7-24a6096177f3 service nova] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Refreshing network info cache for port 136a594e-2108-4bbf-9bd6-f36d7263aedb {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 600.400760] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-f9db62eb-d4dc-41db-a02e-afe6bdee5358 req-ece10d45-3a88-4e3e-87b7-24a6096177f3 service nova] Expecting reply to msg f93e3f07dd82478fa9ce6d8b7098e9af in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 600.416715] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f93e3f07dd82478fa9ce6d8b7098e9af [ 600.560677] env[62740]: DEBUG nova.network.neutron [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 600.613690] env[62740]: DEBUG nova.network.neutron [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Successfully updated port: 5e56cddb-165b-4421-8e11-e60730241229 {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 600.614118] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg c508a8be211c41b6ab95eadf62a1e92c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 600.624964] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c508a8be211c41b6ab95eadf62a1e92c [ 600.625729] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Acquiring lock "refresh_cache-fece072a-baac-4301-988c-0068d6e71cff" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 600.625854] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Acquired lock "refresh_cache-fece072a-baac-4301-988c-0068d6e71cff" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.626013] env[62740]: DEBUG nova.network.neutron [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 600.626442] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg fe8caed18df6464a837d875acee28667 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 600.635818] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fe8caed18df6464a837d875acee28667 [ 600.778309] env[62740]: DEBUG nova.network.neutron [req-b69f1a9c-961a-4cf6-8865-05a9c8f04cbd req-6ed9b75d-38f5-4cea-bb96-961f1e13b7e8 service nova] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Updated VIF entry in instance network info cache for port 89db75e9-e46e-4868-a03f-d0d171a5e2c1. 
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 600.778458] env[62740]: DEBUG nova.network.neutron [req-b69f1a9c-961a-4cf6-8865-05a9c8f04cbd req-6ed9b75d-38f5-4cea-bb96-961f1e13b7e8 service nova] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Updating instance_info_cache with network_info: [{"id": "89db75e9-e46e-4868-a03f-d0d171a5e2c1", "address": "fa:16:3e:ec:ae:59", "network": {"id": "71df833e-f099-40c7-9b1c-c96f8715ef40", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1619380618-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48004a3f53e4434a95b02779399fc7dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27abaf31-0f39-428c-a8d3-cd7548de6818", "external-id": "nsx-vlan-transportzone-505", "segmentation_id": 505, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89db75e9-e4", "ovs_interfaceid": "89db75e9-e46e-4868-a03f-d0d171a5e2c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 600.778990] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-b69f1a9c-961a-4cf6-8865-05a9c8f04cbd req-6ed9b75d-38f5-4cea-bb96-961f1e13b7e8 service nova] Expecting reply to msg b990cba4ecc64e6a927120a6104ef4d4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 600.796435] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b990cba4ecc64e6a927120a6104ef4d4 [ 600.796435] env[62740]: DEBUG oslo_concurrency.lockutils [req-b69f1a9c-961a-4cf6-8865-05a9c8f04cbd req-6ed9b75d-38f5-4cea-bb96-961f1e13b7e8 service nova] Releasing lock "refresh_cache-53f7fc38-a541-4843-883d-da7311445fe5" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 600.856774] env[62740]: DEBUG nova.network.neutron [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 601.008817] env[62740]: DEBUG oslo_concurrency.lockutils [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Acquiring lock "75050b95-60c6-4e44-a1d5-0d47492dd739" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.008817] env[62740]: DEBUG oslo_concurrency.lockutils [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Lock "75050b95-60c6-4e44-a1d5-0d47492dd739" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 601.009030] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Expecting reply to msg 24b39377411842d089fe66da36b2f615 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 601.021818] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 24b39377411842d089fe66da36b2f615 [ 601.022268] env[62740]: DEBUG nova.compute.manager [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Starting instance... 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 601.024096] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Expecting reply to msg f80241405fb345d4bd508a1127c72d92 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 601.085999] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f80241405fb345d4bd508a1127c72d92 [ 601.120939] env[62740]: DEBUG oslo_concurrency.lockutils [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.121400] env[62740]: DEBUG oslo_concurrency.lockutils [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 601.123029] env[62740]: INFO nova.compute.claims [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 601.126916] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Expecting reply to msg 31e40dc1a77a4dbb80f1e34cd0848ff0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 601.193586] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31e40dc1a77a4dbb80f1e34cd0848ff0 [ 601.195354] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Expecting reply to msg 64c9019f20a04db194afb3aa88e83ce2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 601.214252] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 64c9019f20a04db194afb3aa88e83ce2 [ 601.373740] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5bf5cfd-879e-470b-bc6b-2cd6068e4c47 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.381635] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3be921c-a845-495b-9194-06aee3af94a0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.417208] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1208f0c-4bee-4f21-8187-859118bd48fe {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.426030] env[62740]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01a6ab13-c20e-4ffb-8dd0-07d663e8f0b7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.445361] env[62740]: DEBUG nova.compute.provider_tree [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 601.445999] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Expecting reply to msg 9c63047a56cd45729928852efca18e20 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 601.461020] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9c63047a56cd45729928852efca18e20 [ 601.461020] env[62740]: DEBUG nova.scheduler.client.report [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 601.461885] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Expecting reply to msg 07d06ca3ff47455c9042ddb7ddf61a34 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 601.479239] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07d06ca3ff47455c9042ddb7ddf61a34 [ 601.480503] env[62740]: DEBUG oslo_concurrency.lockutils [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.359s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 601.481092] env[62740]: DEBUG nova.compute.manager [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Start building networks asynchronously for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 601.483064] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Expecting reply to msg 725182742d1549cc96b9e344009f8cbe in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 601.529247] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 725182742d1549cc96b9e344009f8cbe [ 601.530831] env[62740]: DEBUG nova.compute.utils [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 601.531735] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Expecting reply to msg 5765cd7389a241a798a32e831ba84ab8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 601.536196] env[62740]: DEBUG nova.compute.manager [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 601.536196] env[62740]: DEBUG nova.network.neutron [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 601.545635] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5765cd7389a241a798a32e831ba84ab8 [ 601.546203] env[62740]: DEBUG nova.compute.manager [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 601.549902] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Expecting reply to msg 0d99e97d6a324c46b41ccf56e79824e1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 601.586810] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0d99e97d6a324c46b41ccf56e79824e1 [ 601.589738] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Expecting reply to msg ef041a12e0df4f6ab4120a8dcd01c890 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 601.632312] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ef041a12e0df4f6ab4120a8dcd01c890 [ 601.633628] env[62740]: DEBUG nova.compute.manager [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Start spawning the instance on the hypervisor. {{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 601.651312] env[62740]: DEBUG nova.network.neutron [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Updating instance_info_cache with network_info: [{"id": "5e56cddb-165b-4421-8e11-e60730241229", "address": "fa:16:3e:89:8c:cb", "network": {"id": "304597cd-4bd9-403c-8f5b-990e2a1efabc", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.41", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "47f0062f3bf04910bbbb3502a2f3ff28", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e56cddb-16", "ovs_interfaceid": "5e56cddb-165b-4421-8e11-e60730241229", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 601.651939] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg b035f2427ece4f4c8062d3dc92352435 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 601.669630] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b035f2427ece4f4c8062d3dc92352435 [ 601.670220] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 
tempest-MigrationsAdminTest-1066981656-project-member] Releasing lock "refresh_cache-fece072a-baac-4301-988c-0068d6e71cff" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 601.670525] env[62740]: DEBUG nova.compute.manager [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Instance network_info: |[{"id": "5e56cddb-165b-4421-8e11-e60730241229", "address": "fa:16:3e:89:8c:cb", "network": {"id": "304597cd-4bd9-403c-8f5b-990e2a1efabc", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.41", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "47f0062f3bf04910bbbb3502a2f3ff28", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e56cddb-16", "ovs_interfaceid": "5e56cddb-165b-4421-8e11-e60730241229", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 601.672020] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:89:8c:cb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '39ab9baf-90cd-4fe2-8d56-434f8210fc19', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5e56cddb-165b-4421-8e11-e60730241229', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 601.679973] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Creating folder: Project (f38588bc783140a38d77f8967add27d0). Parent ref: group-v156037. 
{{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 601.682810] env[62740]: DEBUG nova.virt.hardware [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 601.683067] env[62740]: DEBUG nova.virt.hardware [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 601.683199] env[62740]: DEBUG nova.virt.hardware [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 601.686201] env[62740]: DEBUG nova.virt.hardware [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 601.686201] env[62740]: DEBUG nova.virt.hardware [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 601.686201] env[62740]: DEBUG nova.virt.hardware [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 601.686201] env[62740]: DEBUG nova.virt.hardware [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 601.686201] env[62740]: DEBUG nova.virt.hardware [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 601.686388] env[62740]: DEBUG nova.virt.hardware [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 601.686388] env[62740]: DEBUG nova.virt.hardware [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 601.686388] env[62740]: DEBUG nova.virt.hardware [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 601.686388] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d766af61-02c1-4b64-9467-1d3ceaaf59c6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.688543] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2217b3e2-6919-4bc6-9e6a-0aa4e9fe9e76 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.704730] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Created folder: Project (f38588bc783140a38d77f8967add27d0) in parent group-v156037. [ 601.704730] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Creating folder: Instances. Parent ref: group-v156050. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 601.704730] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f6a42fab-56a4-4fa1-af6b-ce2a1e09f5c3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.709534] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11929856-d1de-4122-9399-8d2232f46ec9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.716141] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Created folder: Instances in parent group-v156050. [ 601.716666] env[62740]: DEBUG oslo.service.loopingcall [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 601.724801] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fece072a-baac-4301-988c-0068d6e71cff] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 601.725163] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7dbdbe7a-801c-4866-8014-6131e9eec39f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.746633] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 601.746633] env[62740]: value = "task-640049" [ 601.746633] env[62740]: _type = "Task" [ 601.746633] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.755307] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640049, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.882226] env[62740]: DEBUG nova.network.neutron [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Updating instance_info_cache with network_info: [{"id": "3d0464f5-8822-416b-b56f-51afc395017f", "address": "fa:16:3e:e8:97:42", "network": {"id": "88089b3e-72a9-4e15-8dc8-c2e5438adf03", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-216438215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "965194a3db2448a5a8d75b43a108718d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "97b68ed7-8461-4345-b064-96a1dde53a86", "external-id": "nsx-vlan-transportzone-140", "segmentation_id": 140, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d0464f5-88", "ovs_interfaceid": "3d0464f5-8822-416b-b56f-51afc395017f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 601.882226] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Expecting reply to msg 5365293f866544bcac2b7fbc76cedf34 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 601.899136] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5365293f866544bcac2b7fbc76cedf34 [ 601.899136] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Releasing lock "refresh_cache-3102cc87-df1a-4de8-bfdb-9b904f40ea2e" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 601.900175] env[62740]: DEBUG 
nova.compute.manager [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Instance network_info: |[{"id": "3d0464f5-8822-416b-b56f-51afc395017f", "address": "fa:16:3e:e8:97:42", "network": {"id": "88089b3e-72a9-4e15-8dc8-c2e5438adf03", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-216438215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "965194a3db2448a5a8d75b43a108718d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "97b68ed7-8461-4345-b064-96a1dde53a86", "external-id": "nsx-vlan-transportzone-140", "segmentation_id": 140, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d0464f5-88", "ovs_interfaceid": "3d0464f5-8822-416b-b56f-51afc395017f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 601.900255] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e8:97:42', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '97b68ed7-8461-4345-b064-96a1dde53a86', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3d0464f5-8822-416b-b56f-51afc395017f', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 601.911334] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Creating folder: Project (965194a3db2448a5a8d75b43a108718d). Parent ref: group-v156037. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 601.912537] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f112f699-7774-49a8-b361-99c70eca184f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.930480] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Created folder: Project (965194a3db2448a5a8d75b43a108718d) in parent group-v156037. [ 601.930480] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Creating folder: Instances. Parent ref: group-v156053. 
{{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 601.930480] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f633789c-a423-4188-aede-e2670deac615 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.940603] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Created folder: Instances in parent group-v156053. [ 601.941099] env[62740]: DEBUG oslo.service.loopingcall [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 601.941383] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 601.941853] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f36f0022-04ec-4bd4-8ec7-f44af580fc92 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.963792] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 601.963792] env[62740]: value = "task-640052" [ 601.963792] env[62740]: _type = "Task" [ 601.963792] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.972543] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640052, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.048360] env[62740]: DEBUG nova.policy [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '97ea89745bbc4cf58786f0d36c4454a6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '99a3dfbbf12b47e3b7926b25481c2a9c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 602.171793] env[62740]: DEBUG nova.network.neutron [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Successfully updated port: a8ea17f3-420a-465c-9b70-9b279603a92f {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 602.172326] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Expecting reply to msg c017842235cc4c51bd4ed8cf39aeb8c9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 602.197779] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c017842235cc4c51bd4ed8cf39aeb8c9 [ 602.198298] env[62740]: DEBUG oslo_concurrency.lockutils [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Acquiring lock "refresh_cache-4f0d1356-bdfb-4cb2-979a-e28f9025b311" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 602.198471] env[62740]: DEBUG oslo_concurrency.lockutils [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Acquired lock "refresh_cache-4f0d1356-bdfb-4cb2-979a-e28f9025b311" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.198642] env[62740]: DEBUG nova.network.neutron [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 602.199039] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Expecting reply to msg 38ccd6f8bd2d4a15b9fe7341fa07b8b4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 602.206431] env[62740]: DEBUG nova.network.neutron [req-f9db62eb-d4dc-41db-a02e-afe6bdee5358 req-ece10d45-3a88-4e3e-87b7-24a6096177f3 service nova] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Updated VIF entry in instance network info cache for port 136a594e-2108-4bbf-9bd6-f36d7263aedb. 
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 602.206810] env[62740]: DEBUG nova.network.neutron [req-f9db62eb-d4dc-41db-a02e-afe6bdee5358 req-ece10d45-3a88-4e3e-87b7-24a6096177f3 service nova] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Updating instance_info_cache with network_info: [{"id": "136a594e-2108-4bbf-9bd6-f36d7263aedb", "address": "fa:16:3e:33:4a:20", "network": {"id": "c706a27c-b24b-4f31-9930-eff5254266f2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-41588659-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8f8758e87377404eb222264dba749e83", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "796c7fcb-00fd-4692-a44b-7ec550201e86", "external-id": "nsx-vlan-transportzone-42", "segmentation_id": 42, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap136a594e-21", "ovs_interfaceid": "136a594e-2108-4bbf-9bd6-f36d7263aedb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 602.207344] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-f9db62eb-d4dc-41db-a02e-afe6bdee5358 req-ece10d45-3a88-4e3e-87b7-24a6096177f3 service nova] Expecting reply to msg 62cf725749e447309babfba44f424155 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 602.218345] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 38ccd6f8bd2d4a15b9fe7341fa07b8b4 [ 602.222337] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 62cf725749e447309babfba44f424155 [ 602.225323] env[62740]: DEBUG oslo_concurrency.lockutils [req-f9db62eb-d4dc-41db-a02e-afe6bdee5358 req-ece10d45-3a88-4e3e-87b7-24a6096177f3 service nova] Releasing lock "refresh_cache-9d175573-2af2-4f66-98cd-411d10f749f0" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 602.258427] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640049, 'name': CreateVM_Task, 'duration_secs': 0.339223} completed successfully. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.258723] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fece072a-baac-4301-988c-0068d6e71cff] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 602.259171] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 602.259432] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.259716] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 602.260354] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34bb8406-59fa-459b-8eb0-321e4fce7af3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.265920] env[62740]: DEBUG oslo_vmware.api [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Waiting for the task: (returnval){ [ 602.265920] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52287a0f-504e-c9d8-ff3c-951939393828" [ 602.265920] env[62740]: _type = "Task" [ 602.265920] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.274934] env[62740]: DEBUG oslo_vmware.api [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52287a0f-504e-c9d8-ff3c-951939393828, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.361909] env[62740]: DEBUG nova.network.neutron [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 602.475649] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640052, 'name': CreateVM_Task, 'duration_secs': 0.322406} completed successfully. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.475896] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 602.476466] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 602.778010] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 602.778631] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 602.778631] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 602.778770] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.779043] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 602.779337] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01af9a16-623f-469c-abb8-c50b194da9de {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.783770] env[62740]: DEBUG oslo_vmware.api [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Waiting for the task: (returnval){ [ 602.783770] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]529aa093-5176-aeb6-d203-fd5ea7e5cf5f" [ 602.783770] env[62740]: _type = "Task" [ 602.783770] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.793643] env[62740]: DEBUG oslo_vmware.api [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]529aa093-5176-aeb6-d203-fd5ea7e5cf5f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.296079] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 603.296079] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 603.296328] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 603.310150] env[62740]: DEBUG nova.network.neutron [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Updating instance_info_cache with network_info: [{"id": "a8ea17f3-420a-465c-9b70-9b279603a92f", "address": "fa:16:3e:c4:b9:14", "network": {"id": "25aa10e1-0c9e-4529-b1d4-ebb51f178344", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-219123173-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a29640051052481dace61e0c2197b229", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "051f343d-ac4f-4070-a26d-467603122c81", "external-id": "nsx-vlan-transportzone-277", "segmentation_id": 277, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8ea17f3-42", "ovs_interfaceid": "a8ea17f3-420a-465c-9b70-9b279603a92f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 603.310150] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 
tempest-AttachInterfacesV270Test-942908702-project-member] Expecting reply to msg 979e3a7f11164e26abc2cae6e06fd34f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 603.322016] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 979e3a7f11164e26abc2cae6e06fd34f [ 603.322653] env[62740]: DEBUG oslo_concurrency.lockutils [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Releasing lock "refresh_cache-4f0d1356-bdfb-4cb2-979a-e28f9025b311" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 603.324787] env[62740]: DEBUG nova.compute.manager [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Instance network_info: |[{"id": "a8ea17f3-420a-465c-9b70-9b279603a92f", "address": "fa:16:3e:c4:b9:14", "network": {"id": "25aa10e1-0c9e-4529-b1d4-ebb51f178344", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-219123173-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a29640051052481dace61e0c2197b229", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "051f343d-ac4f-4070-a26d-467603122c81", "external-id": "nsx-vlan-transportzone-277", "segmentation_id": 277, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8ea17f3-42", "ovs_interfaceid": "a8ea17f3-420a-465c-9b70-9b279603a92f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 603.325178] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c4:b9:14', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '051f343d-ac4f-4070-a26d-467603122c81', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a8ea17f3-420a-465c-9b70-9b279603a92f', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 603.331982] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Creating folder: Project (a29640051052481dace61e0c2197b229). Parent ref: group-v156037. 
{{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 603.332786] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b79db66d-d304-42f7-9d93-51f9b45ee95d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.344385] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Created folder: Project (a29640051052481dace61e0c2197b229) in parent group-v156037. [ 603.344605] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Creating folder: Instances. Parent ref: group-v156056. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 603.344852] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d40c44b8-8a00-4610-9591-5be7a0759cbd {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.357030] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Created folder: Instances in parent group-v156056. [ 603.357292] env[62740]: DEBUG oslo.service.loopingcall [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 603.357481] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 603.357681] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b95230d6-118c-48c2-b88c-74478e7a9a2c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.381887] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 603.381887] env[62740]: value = "task-640055" [ 603.381887] env[62740]: _type = "Task" [ 603.381887] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.392577] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640055, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.414050] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquiring lock "8053d2ae-ca61-4282-aa89-83f3a2e107bc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 603.414300] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "8053d2ae-ca61-4282-aa89-83f3a2e107bc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 603.414820] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 7e4e441471c24e849bbbbf61091a2e32 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 603.436743] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7e4e441471c24e849bbbbf61091a2e32 [ 603.437426] env[62740]: DEBUG nova.compute.manager [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 603.440750] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg e96b960db73d47f5938a95721a16bd11 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 603.485269] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e96b960db73d47f5938a95721a16bd11 [ 603.512053] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 603.514565] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 603.514565] env[62740]: INFO nova.compute.claims [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 603.515568] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None 
req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg f012b7ee04874826a2b2b54873ecb2f1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 603.561829] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f012b7ee04874826a2b2b54873ecb2f1 [ 603.563320] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 7da55a54726e4728a08f4b5b87f77c18 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 603.581153] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7da55a54726e4728a08f4b5b87f77c18 [ 603.610109] env[62740]: DEBUG nova.network.neutron [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Successfully created port: b07df930-5b4a-4eac-857e-90edff87f3a6 {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 603.766016] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93517af6-9e36-4df1-bad5-88a825137539 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.775924] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd7694f5-88b9-4e2c-bac0-39abf249257b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.811071] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd69e455-9689-4a9f-8cc5-cc86ddf3a3f0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.819501] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cce745a-0c21-469e-81af-0008566ee5d7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.835322] env[62740]: DEBUG nova.compute.provider_tree [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 603.835876] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg c1dfdf5f4ef84cf6ad2e001685d9b6f7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 603.848256] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c1dfdf5f4ef84cf6ad2e001685d9b6f7 [ 603.848256] env[62740]: DEBUG nova.scheduler.client.report [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 603.848503] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 0625e5fd666f44c4a1dc2a6a2f69e07b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 603.863649] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0625e5fd666f44c4a1dc2a6a2f69e07b [ 603.864746] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.352s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 603.865243] env[62740]: DEBUG nova.compute.manager [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Start building networks asynchronously for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 603.867570] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 6b0267af05c449a4b06942fb6d399610 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 603.892783] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640055, 'name': CreateVM_Task, 'duration_secs': 0.320415} completed successfully. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.892965] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 603.893728] env[62740]: DEBUG oslo_concurrency.lockutils [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 603.893840] env[62740]: DEBUG oslo_concurrency.lockutils [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.894309] env[62740]: DEBUG oslo_concurrency.lockutils [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 603.894417] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-151e550e-a2cf-421a-a252-c0c27a169f05 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.900341] env[62740]: DEBUG oslo_vmware.api [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Waiting for the task: (returnval){ [ 603.900341] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52a03c53-bcd1-85cc-e99e-0e6bb7d5ab1a" [ 603.900341] env[62740]: _type = "Task" [ 603.900341] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.909313] env[62740]: DEBUG oslo_vmware.api [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52a03c53-bcd1-85cc-e99e-0e6bb7d5ab1a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.909937] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6b0267af05c449a4b06942fb6d399610 [ 603.911216] env[62740]: DEBUG nova.compute.utils [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 603.911824] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 7ebde6c632de49b88b9b5f3a7ba8c991 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 603.912659] env[62740]: DEBUG nova.compute.manager [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 603.912825] env[62740]: DEBUG nova.network.neutron [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 603.924293] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ebde6c632de49b88b9b5f3a7ba8c991 [ 603.925588] env[62740]: DEBUG nova.compute.manager [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Start building block device mappings for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 603.927641] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg c4f99c23f81044b7883362178d55bb77 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 603.965715] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c4f99c23f81044b7883362178d55bb77 [ 603.966657] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg fa99c0e708744abca1a60ebab6fffd19 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 604.001722] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fa99c0e708744abca1a60ebab6fffd19 [ 604.002850] env[62740]: DEBUG nova.compute.manager [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Start spawning the instance on the hypervisor. 
{{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 604.029547] env[62740]: DEBUG nova.virt.hardware [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 604.029813] env[62740]: DEBUG nova.virt.hardware [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 604.029984] env[62740]: DEBUG nova.virt.hardware [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 604.030213] env[62740]: DEBUG nova.virt.hardware [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 604.030336] env[62740]: DEBUG nova.virt.hardware [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 604.030489] env[62740]: DEBUG nova.virt.hardware [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 604.030701] env[62740]: DEBUG nova.virt.hardware [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 604.030865] env[62740]: DEBUG nova.virt.hardware [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 604.031342] env[62740]: DEBUG nova.virt.hardware [None 
req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 604.031591] env[62740]: DEBUG nova.virt.hardware [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 604.032512] env[62740]: DEBUG nova.virt.hardware [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 604.034409] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05c1bf7d-71a8-478e-9277-8bd279243811 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.043992] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d09071e3-40c6-457b-ad78-a916ca2616a9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.202464] env[62740]: DEBUG nova.policy [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fa549a18cbf84678844e14ddd094d70e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '065d149aea7645d7a5e32c0d14ff0936', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 604.414054] env[62740]: DEBUG oslo_concurrency.lockutils [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 604.414343] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 604.414561] env[62740]: DEBUG oslo_concurrency.lockutils [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 604.529978] env[62740]: DEBUG nova.compute.manager 
[req-dc6168bf-fdca-4d40-b55d-887d55666d28 req-1ac933b2-97d2-434e-b25d-4a616076be0d service nova] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Received event network-vif-plugged-3d0464f5-8822-416b-b56f-51afc395017f {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 604.529978] env[62740]: DEBUG oslo_concurrency.lockutils [req-dc6168bf-fdca-4d40-b55d-887d55666d28 req-1ac933b2-97d2-434e-b25d-4a616076be0d service nova] Acquiring lock "3102cc87-df1a-4de8-bfdb-9b904f40ea2e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 604.529978] env[62740]: DEBUG oslo_concurrency.lockutils [req-dc6168bf-fdca-4d40-b55d-887d55666d28 req-1ac933b2-97d2-434e-b25d-4a616076be0d service nova] Lock "3102cc87-df1a-4de8-bfdb-9b904f40ea2e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 604.529978] env[62740]: DEBUG oslo_concurrency.lockutils [req-dc6168bf-fdca-4d40-b55d-887d55666d28 req-1ac933b2-97d2-434e-b25d-4a616076be0d service nova] Lock "3102cc87-df1a-4de8-bfdb-9b904f40ea2e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 604.531527] env[62740]: DEBUG nova.compute.manager [req-dc6168bf-fdca-4d40-b55d-887d55666d28 req-1ac933b2-97d2-434e-b25d-4a616076be0d service nova] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] No waiting events found dispatching network-vif-plugged-3d0464f5-8822-416b-b56f-51afc395017f {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 604.532714] env[62740]: WARNING nova.compute.manager [req-dc6168bf-fdca-4d40-b55d-887d55666d28 req-1ac933b2-97d2-434e-b25d-4a616076be0d service nova] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Received unexpected event network-vif-plugged-3d0464f5-8822-416b-b56f-51afc395017f for instance with vm_state building and task_state spawning. [ 604.533467] env[62740]: DEBUG nova.compute.manager [req-dc6168bf-fdca-4d40-b55d-887d55666d28 req-1ac933b2-97d2-434e-b25d-4a616076be0d service nova] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Received event network-changed-3d0464f5-8822-416b-b56f-51afc395017f {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 604.536101] env[62740]: DEBUG nova.compute.manager [req-dc6168bf-fdca-4d40-b55d-887d55666d28 req-1ac933b2-97d2-434e-b25d-4a616076be0d service nova] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Refreshing instance network info cache due to event network-changed-3d0464f5-8822-416b-b56f-51afc395017f. 
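The Acquiring/acquired/released triplets above, with their waited/held timings, are emitted by oslo.concurrency's lockutils. The same in-process critical section can be reproduced with the public API; a minimal sketch, with the lock name copied from the trace and the body illustrative:

```python
from oslo_concurrency import lockutils

events = {}

# Mirrors the per-instance "<uuid>-events" critical section above;
# lockutils logs matching Acquiring/acquired/released lines with timings.
@lockutils.synchronized('3102cc87-df1a-4de8-bfdb-9b904f40ea2e-events')
def pop_event(name):
    # Remove and return a registered waiter for this event, if any.
    return events.pop(name, None)

print(pop_event('network-vif-plugged-3d0464f5-8822-416b-b56f-51afc395017f'))  # None
```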
{{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 604.536101] env[62740]: DEBUG oslo_concurrency.lockutils [req-dc6168bf-fdca-4d40-b55d-887d55666d28 req-1ac933b2-97d2-434e-b25d-4a616076be0d service nova] Acquiring lock "refresh_cache-3102cc87-df1a-4de8-bfdb-9b904f40ea2e" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 604.536101] env[62740]: DEBUG oslo_concurrency.lockutils [req-dc6168bf-fdca-4d40-b55d-887d55666d28 req-1ac933b2-97d2-434e-b25d-4a616076be0d service nova] Acquired lock "refresh_cache-3102cc87-df1a-4de8-bfdb-9b904f40ea2e" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.536101] env[62740]: DEBUG nova.network.neutron [req-dc6168bf-fdca-4d40-b55d-887d55666d28 req-1ac933b2-97d2-434e-b25d-4a616076be0d service nova] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Refreshing network info cache for port 3d0464f5-8822-416b-b56f-51afc395017f {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 604.536101] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-dc6168bf-fdca-4d40-b55d-887d55666d28 req-1ac933b2-97d2-434e-b25d-4a616076be0d service nova] Expecting reply to msg f202cae334194cafb41a7eca6b503970 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 604.551255] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f202cae334194cafb41a7eca6b503970 [ 605.210043] env[62740]: DEBUG nova.compute.manager [req-4919671c-73e8-432b-bd79-90bcd41ea7ad req-421869d2-57e4-40a4-b0da-be273487ad5a service nova] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Received event network-vif-plugged-78c7aabd-abfc-4814-8f7d-90ad8943739f {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 605.210254] env[62740]: DEBUG oslo_concurrency.lockutils [req-4919671c-73e8-432b-bd79-90bcd41ea7ad req-421869d2-57e4-40a4-b0da-be273487ad5a service nova] Acquiring lock "b326be2c-43f2-4f04-9652-cec7e017288e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 605.210793] env[62740]: DEBUG oslo_concurrency.lockutils [req-4919671c-73e8-432b-bd79-90bcd41ea7ad req-421869d2-57e4-40a4-b0da-be273487ad5a service nova] Lock "b326be2c-43f2-4f04-9652-cec7e017288e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 605.210999] env[62740]: DEBUG oslo_concurrency.lockutils [req-4919671c-73e8-432b-bd79-90bcd41ea7ad req-421869d2-57e4-40a4-b0da-be273487ad5a service nova] Lock "b326be2c-43f2-4f04-9652-cec7e017288e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 605.212296] env[62740]: DEBUG nova.compute.manager [req-4919671c-73e8-432b-bd79-90bcd41ea7ad req-421869d2-57e4-40a4-b0da-be273487ad5a service nova] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] No waiting events found dispatching network-vif-plugged-78c7aabd-abfc-4814-8f7d-90ad8943739f {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 605.212504] env[62740]: WARNING nova.compute.manager [req-4919671c-73e8-432b-bd79-90bcd41ea7ad 
req-421869d2-57e4-40a4-b0da-be273487ad5a service nova] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Received unexpected event network-vif-plugged-78c7aabd-abfc-4814-8f7d-90ad8943739f for instance with vm_state building and task_state spawning. [ 605.212725] env[62740]: DEBUG nova.compute.manager [req-4919671c-73e8-432b-bd79-90bcd41ea7ad req-421869d2-57e4-40a4-b0da-be273487ad5a service nova] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Received event network-changed-78c7aabd-abfc-4814-8f7d-90ad8943739f {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 605.212863] env[62740]: DEBUG nova.compute.manager [req-4919671c-73e8-432b-bd79-90bcd41ea7ad req-421869d2-57e4-40a4-b0da-be273487ad5a service nova] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Refreshing instance network info cache due to event network-changed-78c7aabd-abfc-4814-8f7d-90ad8943739f. {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 605.213084] env[62740]: DEBUG oslo_concurrency.lockutils [req-4919671c-73e8-432b-bd79-90bcd41ea7ad req-421869d2-57e4-40a4-b0da-be273487ad5a service nova] Acquiring lock "refresh_cache-b326be2c-43f2-4f04-9652-cec7e017288e" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 605.213510] env[62740]: DEBUG oslo_concurrency.lockutils [req-4919671c-73e8-432b-bd79-90bcd41ea7ad req-421869d2-57e4-40a4-b0da-be273487ad5a service nova] Acquired lock "refresh_cache-b326be2c-43f2-4f04-9652-cec7e017288e" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.213629] env[62740]: DEBUG nova.network.neutron [req-4919671c-73e8-432b-bd79-90bcd41ea7ad req-421869d2-57e4-40a4-b0da-be273487ad5a service nova] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Refreshing network info cache for port 78c7aabd-abfc-4814-8f7d-90ad8943739f {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 605.214179] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-4919671c-73e8-432b-bd79-90bcd41ea7ad req-421869d2-57e4-40a4-b0da-be273487ad5a service nova] Expecting reply to msg 1d353e02d2084b3db92fae43139b3080 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 605.233459] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1d353e02d2084b3db92fae43139b3080 [ 605.564640] env[62740]: DEBUG nova.network.neutron [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Successfully created port: 55cd9e15-32c1-41e3-8a19-a6a6128fbc87 {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 606.057339] env[62740]: DEBUG nova.network.neutron [req-dc6168bf-fdca-4d40-b55d-887d55666d28 req-1ac933b2-97d2-434e-b25d-4a616076be0d service nova] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Updated VIF entry in instance network info cache for port 3d0464f5-8822-416b-b56f-51afc395017f. 
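The "No waiting events found" / "Received unexpected event" pair above reflects nova-compute's event-waiter table: the Neutron-originated event arrived before anything registered to wait for it, which is harmless while the instance is still building and spawning. A simplified, self-contained sketch of that dispatch logic follows; it is not Nova's actual implementation.

```python
import threading

class InstanceEvents:
    """Toy version of the waiter table behind pop_instance_event."""
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}                    # {(uuid, event_name): Event}

    def prepare(self, uuid, name):
        ev = threading.Event()
        with self._lock:
            self._waiters[(uuid, name)] = ev
        return ev

    def dispatch(self, uuid, name):
        with self._lock:
            waiter = self._waiters.pop((uuid, name), None)
        if waiter is None:
            # Corresponds to the WARNING above: nobody was waiting yet.
            print(f'unexpected event {name} for instance {uuid}')
        else:
            waiter.set()

ev = InstanceEvents()
ev.dispatch('3102cc87-df1a-4de8-bfdb-9b904f40ea2e',
            'network-vif-plugged-3d0464f5-8822-416b-b56f-51afc395017f')
```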
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 606.057780] env[62740]: DEBUG nova.network.neutron [req-dc6168bf-fdca-4d40-b55d-887d55666d28 req-1ac933b2-97d2-434e-b25d-4a616076be0d service nova] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Updating instance_info_cache with network_info: [{"id": "3d0464f5-8822-416b-b56f-51afc395017f", "address": "fa:16:3e:e8:97:42", "network": {"id": "88089b3e-72a9-4e15-8dc8-c2e5438adf03", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-216438215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "965194a3db2448a5a8d75b43a108718d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "97b68ed7-8461-4345-b064-96a1dde53a86", "external-id": "nsx-vlan-transportzone-140", "segmentation_id": 140, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d0464f5-88", "ovs_interfaceid": "3d0464f5-8822-416b-b56f-51afc395017f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.058426] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-dc6168bf-fdca-4d40-b55d-887d55666d28 req-1ac933b2-97d2-434e-b25d-4a616076be0d service nova] Expecting reply to msg de4ddfa23d964ec1bdd1555887b1859d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 606.071513] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg de4ddfa23d964ec1bdd1555887b1859d [ 606.072248] env[62740]: DEBUG oslo_concurrency.lockutils [req-dc6168bf-fdca-4d40-b55d-887d55666d28 req-1ac933b2-97d2-434e-b25d-4a616076be0d service nova] Releasing lock "refresh_cache-3102cc87-df1a-4de8-bfdb-9b904f40ea2e" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 606.633852] env[62740]: DEBUG nova.network.neutron [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Successfully updated port: b07df930-5b4a-4eac-857e-90edff87f3a6 {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 606.634365] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Expecting reply to msg f767244423a74d37b2502e0d67d04436 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 606.647366] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f767244423a74d37b2502e0d67d04436 [ 606.648646] env[62740]: DEBUG oslo_concurrency.lockutils [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Acquiring lock "refresh_cache-75050b95-60c6-4e44-a1d5-0d47492dd739" {{(pid=62740) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 606.652751] env[62740]: DEBUG oslo_concurrency.lockutils [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Acquired lock "refresh_cache-75050b95-60c6-4e44-a1d5-0d47492dd739" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.652751] env[62740]: DEBUG nova.network.neutron [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 606.652751] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Expecting reply to msg 8e241a4fef2248e7ae06753ac964d841 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 606.665369] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e241a4fef2248e7ae06753ac964d841 [ 606.812898] env[62740]: DEBUG nova.network.neutron [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 607.118091] env[62740]: DEBUG nova.network.neutron [req-4919671c-73e8-432b-bd79-90bcd41ea7ad req-421869d2-57e4-40a4-b0da-be273487ad5a service nova] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Updated VIF entry in instance network info cache for port 78c7aabd-abfc-4814-8f7d-90ad8943739f. 
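The cache entry dumped after this record is a JSON-like list of VIF dicts. For reference, a hypothetical helper (not part of Nova) that reduces one such entry to the fields usually needed, using only the structure visible in the dump:

```python
def summarize_vif(vif):
    """Flatten one cached VIF entry of the shape dumped below."""
    ips = [ip['address']
           for subnet in vif['network']['subnets']
           for ip in subnet['ips']]
    return {'port': vif['id'], 'mac': vif['address'],
            'devname': vif['devname'], 'fixed_ips': ips}

# Trimmed from the 78c7aabd-... entry in the trace:
vif = {'id': '78c7aabd-abfc-4814-8f7d-90ad8943739f',
       'address': 'fa:16:3e:49:78:c3',
       'devname': 'tap78c7aabd-ab',
       'network': {'subnets': [{'ips': [{'address': '192.168.128.4'}]}]}}
print(summarize_vif(vif))
```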
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 607.118091] env[62740]: DEBUG nova.network.neutron [req-4919671c-73e8-432b-bd79-90bcd41ea7ad req-421869d2-57e4-40a4-b0da-be273487ad5a service nova] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Updating instance_info_cache with network_info: [{"id": "78c7aabd-abfc-4814-8f7d-90ad8943739f", "address": "fa:16:3e:49:78:c3", "network": {"id": "c1ab5dd8-1f3a-4091-af45-4bce90893657", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1720290614-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c12d94f17bff48b48f03a4cf2c692c3a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78c7aabd-ab", "ovs_interfaceid": "78c7aabd-abfc-4814-8f7d-90ad8943739f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.118246] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-4919671c-73e8-432b-bd79-90bcd41ea7ad req-421869d2-57e4-40a4-b0da-be273487ad5a service nova] Expecting reply to msg 5ebee1b871ac47278a94110cc0fb1a3a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 607.129183] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ebee1b871ac47278a94110cc0fb1a3a [ 607.129950] env[62740]: DEBUG oslo_concurrency.lockutils [req-4919671c-73e8-432b-bd79-90bcd41ea7ad req-421869d2-57e4-40a4-b0da-be273487ad5a service nova] Releasing lock "refresh_cache-b326be2c-43f2-4f04-9652-cec7e017288e" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 607.130146] env[62740]: DEBUG nova.compute.manager [req-4919671c-73e8-432b-bd79-90bcd41ea7ad req-421869d2-57e4-40a4-b0da-be273487ad5a service nova] [instance: fece072a-baac-4301-988c-0068d6e71cff] Received event network-vif-plugged-5e56cddb-165b-4421-8e11-e60730241229 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 607.130302] env[62740]: DEBUG oslo_concurrency.lockutils [req-4919671c-73e8-432b-bd79-90bcd41ea7ad req-421869d2-57e4-40a4-b0da-be273487ad5a service nova] Acquiring lock "fece072a-baac-4301-988c-0068d6e71cff-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 607.130497] env[62740]: DEBUG oslo_concurrency.lockutils [req-4919671c-73e8-432b-bd79-90bcd41ea7ad req-421869d2-57e4-40a4-b0da-be273487ad5a service nova] Lock "fece072a-baac-4301-988c-0068d6e71cff-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 607.130654] env[62740]: DEBUG 
oslo_concurrency.lockutils [req-4919671c-73e8-432b-bd79-90bcd41ea7ad req-421869d2-57e4-40a4-b0da-be273487ad5a service nova] Lock "fece072a-baac-4301-988c-0068d6e71cff-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 607.130817] env[62740]: DEBUG nova.compute.manager [req-4919671c-73e8-432b-bd79-90bcd41ea7ad req-421869d2-57e4-40a4-b0da-be273487ad5a service nova] [instance: fece072a-baac-4301-988c-0068d6e71cff] No waiting events found dispatching network-vif-plugged-5e56cddb-165b-4421-8e11-e60730241229 {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 607.130981] env[62740]: WARNING nova.compute.manager [req-4919671c-73e8-432b-bd79-90bcd41ea7ad req-421869d2-57e4-40a4-b0da-be273487ad5a service nova] [instance: fece072a-baac-4301-988c-0068d6e71cff] Received unexpected event network-vif-plugged-5e56cddb-165b-4421-8e11-e60730241229 for instance with vm_state building and task_state spawning. [ 607.131157] env[62740]: DEBUG nova.compute.manager [req-4919671c-73e8-432b-bd79-90bcd41ea7ad req-421869d2-57e4-40a4-b0da-be273487ad5a service nova] [instance: fece072a-baac-4301-988c-0068d6e71cff] Received event network-changed-5e56cddb-165b-4421-8e11-e60730241229 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 607.131323] env[62740]: DEBUG nova.compute.manager [req-4919671c-73e8-432b-bd79-90bcd41ea7ad req-421869d2-57e4-40a4-b0da-be273487ad5a service nova] [instance: fece072a-baac-4301-988c-0068d6e71cff] Refreshing instance network info cache due to event network-changed-5e56cddb-165b-4421-8e11-e60730241229. {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 607.131500] env[62740]: DEBUG oslo_concurrency.lockutils [req-4919671c-73e8-432b-bd79-90bcd41ea7ad req-421869d2-57e4-40a4-b0da-be273487ad5a service nova] Acquiring lock "refresh_cache-fece072a-baac-4301-988c-0068d6e71cff" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 607.131636] env[62740]: DEBUG oslo_concurrency.lockutils [req-4919671c-73e8-432b-bd79-90bcd41ea7ad req-421869d2-57e4-40a4-b0da-be273487ad5a service nova] Acquired lock "refresh_cache-fece072a-baac-4301-988c-0068d6e71cff" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.132485] env[62740]: DEBUG nova.network.neutron [req-4919671c-73e8-432b-bd79-90bcd41ea7ad req-421869d2-57e4-40a4-b0da-be273487ad5a service nova] [instance: fece072a-baac-4301-988c-0068d6e71cff] Refreshing network info cache for port 5e56cddb-165b-4421-8e11-e60730241229 {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 607.133374] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-4919671c-73e8-432b-bd79-90bcd41ea7ad req-421869d2-57e4-40a4-b0da-be273487ad5a service nova] Expecting reply to msg 32f86e2eb88b4b8991584d55ea0c8ecc in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 607.143721] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 32f86e2eb88b4b8991584d55ea0c8ecc [ 607.866342] env[62740]: DEBUG nova.network.neutron [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Updating instance_info_cache with 
network_info: [{"id": "b07df930-5b4a-4eac-857e-90edff87f3a6", "address": "fa:16:3e:85:cd:6f", "network": {"id": "304597cd-4bd9-403c-8f5b-990e2a1efabc", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.94", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "47f0062f3bf04910bbbb3502a2f3ff28", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb07df930-5b", "ovs_interfaceid": "b07df930-5b4a-4eac-857e-90edff87f3a6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.866342] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Expecting reply to msg dcc84755492c4b41a2387292435538b3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 607.882661] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dcc84755492c4b41a2387292435538b3 [ 607.885151] env[62740]: DEBUG oslo_concurrency.lockutils [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Releasing lock "refresh_cache-75050b95-60c6-4e44-a1d5-0d47492dd739" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 607.885151] env[62740]: DEBUG nova.compute.manager [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Instance network_info: |[{"id": "b07df930-5b4a-4eac-857e-90edff87f3a6", "address": "fa:16:3e:85:cd:6f", "network": {"id": "304597cd-4bd9-403c-8f5b-990e2a1efabc", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.94", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "47f0062f3bf04910bbbb3502a2f3ff28", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb07df930-5b", "ovs_interfaceid": "b07df930-5b4a-4eac-857e-90edff87f3a6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 607.885343] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:cd:6f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '39ab9baf-90cd-4fe2-8d56-434f8210fc19', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b07df930-5b4a-4eac-857e-90edff87f3a6', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 607.900696] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Creating folder: Project (99a3dfbbf12b47e3b7926b25481c2a9c). Parent ref: group-v156037. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 607.902089] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-abe0c23c-3095-47aa-b934-468ec161dedf {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.913834] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Created folder: Project (99a3dfbbf12b47e3b7926b25481c2a9c) in parent group-v156037. [ 607.914170] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Creating folder: Instances. Parent ref: group-v156059. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 607.914508] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-43dec07c-57a5-4141-80ee-e59ea1b325a5 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.925497] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Created folder: Instances in parent group-v156059. [ 607.925850] env[62740]: DEBUG oslo.service.loopingcall [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
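Folder.CreateVM_Task below is asynchronous on the vCenter side; oslo.vmware submits it and then polls its state, which is what produces the "Waiting for the task" and "progress is 0%" lines that follow. A sketch in terms of the public oslo.vmware API, assuming the host, credentials, and managed-object references are placeholders:

```python
from oslo_vmware import api

def create_vm(session, folder_ref, config_spec, respool_ref):
    # Submit the async task, then block while oslo.vmware polls its
    # state (the "progress is 0%" lines) until success or error.
    task = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                              config=config_spec, pool=respool_ref)
    return session.wait_for_task(task)

# session = api.VMwareAPISession('vc.example.org', 'user', 'secret',
#                                api_retry_count=3, task_poll_interval=0.5)
```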
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 607.926146] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 607.926443] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b45ffd84-f8f3-41d0-be5d-40b28effc116 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.957738] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 607.957738] env[62740]: value = "task-640058" [ 607.957738] env[62740]: _type = "Task" [ 607.957738] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.969793] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640058, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.471884] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640058, 'name': CreateVM_Task, 'duration_secs': 0.304067} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.471884] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 608.472665] env[62740]: DEBUG oslo_concurrency.lockutils [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 608.473108] env[62740]: DEBUG oslo_concurrency.lockutils [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.473669] env[62740]: DEBUG oslo_concurrency.lockutils [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 608.474575] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2236054-6d10-4fb8-b1ae-6793bb1f681c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.479617] env[62740]: DEBUG oslo_vmware.api [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Waiting for the task: (returnval){ [ 608.479617] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5255f7bb-3074-4f2f-18fb-03ec351448a0" [ 608.479617] env[62740]: _type = "Task" [ 608.479617] env[62740]: } 
to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.491546] env[62740]: DEBUG oslo_vmware.api [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5255f7bb-3074-4f2f-18fb-03ec351448a0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.637032] env[62740]: DEBUG nova.network.neutron [req-4919671c-73e8-432b-bd79-90bcd41ea7ad req-421869d2-57e4-40a4-b0da-be273487ad5a service nova] [instance: fece072a-baac-4301-988c-0068d6e71cff] Updated VIF entry in instance network info cache for port 5e56cddb-165b-4421-8e11-e60730241229. {{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 608.637400] env[62740]: DEBUG nova.network.neutron [req-4919671c-73e8-432b-bd79-90bcd41ea7ad req-421869d2-57e4-40a4-b0da-be273487ad5a service nova] [instance: fece072a-baac-4301-988c-0068d6e71cff] Updating instance_info_cache with network_info: [{"id": "5e56cddb-165b-4421-8e11-e60730241229", "address": "fa:16:3e:89:8c:cb", "network": {"id": "304597cd-4bd9-403c-8f5b-990e2a1efabc", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.41", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "47f0062f3bf04910bbbb3502a2f3ff28", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e56cddb-16", "ovs_interfaceid": "5e56cddb-165b-4421-8e11-e60730241229", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 608.637917] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-4919671c-73e8-432b-bd79-90bcd41ea7ad req-421869d2-57e4-40a4-b0da-be273487ad5a service nova] Expecting reply to msg f97c59e99c494d659b3f6ee448479392 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 608.649328] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f97c59e99c494d659b3f6ee448479392 [ 608.650025] env[62740]: DEBUG oslo_concurrency.lockutils [req-4919671c-73e8-432b-bd79-90bcd41ea7ad req-421869d2-57e4-40a4-b0da-be273487ad5a service nova] Releasing lock "refresh_cache-fece072a-baac-4301-988c-0068d6e71cff" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 608.993615] env[62740]: DEBUG oslo_concurrency.lockutils [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 608.993615] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 608.993615] env[62740]: DEBUG oslo_concurrency.lockutils [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 609.172023] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Acquiring lock "a24df1e4-2865-4ab3-beae-0892dca12bef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 609.172023] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Lock "a24df1e4-2865-4ab3-beae-0892dca12bef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 609.172726] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Expecting reply to msg ec3ae225e88141bbb43e22a5229759e7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 609.191187] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ec3ae225e88141bbb43e22a5229759e7 [ 609.192491] env[62740]: DEBUG nova.compute.manager [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Starting instance... 
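Every "Expecting reply to msg <id> in queue reply_..." / "Received RPC response" pair in this trace is the amqpdriver side of a blocking oslo.messaging call: the caller publishes a request carrying a message id plus its per-process reply queue, then waits for the matching response. In public-API terms the pattern looks roughly like this; the topic and method names are illustrative, not taken from the trace:

```python
import oslo_messaging
from oslo_config import cfg

transport = oslo_messaging.get_rpc_transport(cfg.CONF)
target = oslo_messaging.Target(topic='conductor', version='3.0')
client = oslo_messaging.RPCClient(transport, target)
# client.call(...) blocks on the process-wide reply queue until the
# response with the matching message id arrives (the pairs above):
# result = client.call(ctxt, 'some_method', arg=value)
```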
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 609.194350] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Expecting reply to msg afde95e792534cce9e85f5b1a158412e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 609.247173] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg afde95e792534cce9e85f5b1a158412e [ 609.277971] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 609.278286] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 609.279926] env[62740]: INFO nova.compute.claims [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 609.281754] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Expecting reply to msg ba0089ccf0f04b10927e0eb0c2345b53 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 609.326939] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ba0089ccf0f04b10927e0eb0c2345b53 [ 609.328746] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Expecting reply to msg a28fa75ade134ed7aa6fc1044a346425 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 609.336762] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a28fa75ade134ed7aa6fc1044a346425 [ 609.365085] env[62740]: DEBUG nova.network.neutron [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Successfully updated port: 55cd9e15-32c1-41e3-8a19-a6a6128fbc87 {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 609.365574] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 4528de7a8a07431aabf1388f79251a39 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 609.379682] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4528de7a8a07431aabf1388f79251a39 [ 609.380230] env[62740]: DEBUG 
oslo_concurrency.lockutils [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquiring lock "refresh_cache-8053d2ae-ca61-4282-aa89-83f3a2e107bc" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 609.380366] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquired lock "refresh_cache-8053d2ae-ca61-4282-aa89-83f3a2e107bc" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.380535] env[62740]: DEBUG nova.network.neutron [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 609.380925] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg a19eed236fc0437aab98c89d73f976ee in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 609.390750] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a19eed236fc0437aab98c89d73f976ee [ 609.590598] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e1d0773-2c7e-4711-b890-64f107e0177c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.598826] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f1d77a9-504d-4632-b948-33c5a560f172 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.635944] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5570c710-c642-42ee-ab88-3ae1e23ca8c3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.644334] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d223a91-6002-460f-bd71-4f5363af811e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.663570] env[62740]: DEBUG nova.compute.provider_tree [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 609.664177] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Expecting reply to msg bcdefff20a8f4a94b27531ea80806347 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 609.673991] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bcdefff20a8f4a94b27531ea80806347 [ 609.674979] env[62740]: DEBUG nova.scheduler.client.report [None 
req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 609.677392] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Expecting reply to msg 37750d5758d44e588b6d52996f6a6864 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 609.692152] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 37750d5758d44e588b6d52996f6a6864 [ 609.692974] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.415s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 609.693494] env[62740]: DEBUG nova.compute.manager [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Start building networks asynchronously for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 609.696666] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Expecting reply to msg edf9d4ce42934d55ada6774949daba97 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 609.741616] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg edf9d4ce42934d55ada6774949daba97 [ 609.743288] env[62740]: DEBUG nova.compute.utils [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 609.743703] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Expecting reply to msg c857e883a4754896be6d7869788240f9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 609.744840] env[62740]: DEBUG nova.compute.manager [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Allocating IP information in the background. 
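The inventory dump above fixes this node's schedulable capacity. Placement computes effective capacity per resource class as (total - reserved) * allocation_ratio, so the figures reported work out as follows:

```python
# Values copied from the inventory data logged above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
```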
{{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 609.744999] env[62740]: DEBUG nova.network.neutron [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 609.762072] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c857e883a4754896be6d7869788240f9 [ 609.762072] env[62740]: DEBUG nova.compute.manager [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Start building block device mappings for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 609.763823] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Expecting reply to msg 706810a6d9e44291a60b6f9227189541 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 609.801355] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 706810a6d9e44291a60b6f9227189541 [ 609.804778] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Expecting reply to msg 83b55d307bef4b388d240f6154955b8a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 609.806699] env[62740]: DEBUG nova.network.neutron [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 609.843159] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 83b55d307bef4b388d240f6154955b8a [ 609.844433] env[62740]: DEBUG nova.compute.manager [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Start spawning the instance on the hypervisor. 
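The nova.virt.hardware block that follows repeats the topology search already seen for the earlier instance: with 1 vCPU and effectively unlimited sockets, cores, and threads, the only factorization is 1x1x1. A simplified restatement of that search (Nova's real code additionally weighs flavor and image preferences):

```python
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Yield (sockets, cores, threads) triples whose product is vcpus."""
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    yield (s, c, t)

print(list(possible_topologies(1)))   # [(1, 1, 1)] -> "Got 1 possible topologies"
```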
{{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 609.871081] env[62740]: DEBUG nova.virt.hardware [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 609.871081] env[62740]: DEBUG nova.virt.hardware [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 609.871081] env[62740]: DEBUG nova.virt.hardware [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 609.871258] env[62740]: DEBUG nova.virt.hardware [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 609.871258] env[62740]: DEBUG nova.virt.hardware [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 609.871929] env[62740]: DEBUG nova.virt.hardware [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 609.872350] env[62740]: DEBUG nova.virt.hardware [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 609.872705] env[62740]: DEBUG nova.virt.hardware [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 609.873027] env[62740]: DEBUG nova.virt.hardware [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 609.873320] env[62740]: DEBUG nova.virt.hardware [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 609.873776] env[62740]: DEBUG nova.virt.hardware [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 609.875692] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2784bb80-1283-4967-b7bc-c0340182c01b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.884477] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89e000e4-e07f-4861-bd31-49db5d0abd7a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.101409] env[62740]: DEBUG nova.policy [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '64b87b891c004c21ae65a4e8f41c5d33', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f6ec2bd0781548ea9620d349be9b9ff2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 610.161088] env[62740]: DEBUG nova.compute.manager [req-d345be63-18b5-49d5-9286-6941dd092d0c req-67d9980b-5a67-4385-aea5-d34533d6dc58 service nova] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Received event network-vif-plugged-a8ea17f3-420a-465c-9b70-9b279603a92f {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 610.161320] env[62740]: DEBUG oslo_concurrency.lockutils [req-d345be63-18b5-49d5-9286-6941dd092d0c req-67d9980b-5a67-4385-aea5-d34533d6dc58 service nova] Acquiring lock "4f0d1356-bdfb-4cb2-979a-e28f9025b311-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.161547] env[62740]: DEBUG oslo_concurrency.lockutils [req-d345be63-18b5-49d5-9286-6941dd092d0c req-67d9980b-5a67-4385-aea5-d34533d6dc58 service nova] Lock "4f0d1356-bdfb-4cb2-979a-e28f9025b311-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 610.163027] env[62740]: DEBUG oslo_concurrency.lockutils [req-d345be63-18b5-49d5-9286-6941dd092d0c req-67d9980b-5a67-4385-aea5-d34533d6dc58 service nova] Lock "4f0d1356-bdfb-4cb2-979a-e28f9025b311-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 610.163027] env[62740]: DEBUG nova.compute.manager [req-d345be63-18b5-49d5-9286-6941dd092d0c req-67d9980b-5a67-4385-aea5-d34533d6dc58 service nova] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] No waiting events found dispatching network-vif-plugged-a8ea17f3-420a-465c-9b70-9b279603a92f {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 610.163027] env[62740]: WARNING nova.compute.manager [req-d345be63-18b5-49d5-9286-6941dd092d0c req-67d9980b-5a67-4385-aea5-d34533d6dc58 service nova] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Received unexpected event network-vif-plugged-a8ea17f3-420a-465c-9b70-9b279603a92f for instance with vm_state building and task_state spawning. [ 610.163144] env[62740]: DEBUG nova.compute.manager [req-d345be63-18b5-49d5-9286-6941dd092d0c req-67d9980b-5a67-4385-aea5-d34533d6dc58 service nova] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Received event network-changed-a8ea17f3-420a-465c-9b70-9b279603a92f {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 610.163397] env[62740]: DEBUG nova.compute.manager [req-d345be63-18b5-49d5-9286-6941dd092d0c req-67d9980b-5a67-4385-aea5-d34533d6dc58 service nova] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Refreshing instance network info cache due to event network-changed-a8ea17f3-420a-465c-9b70-9b279603a92f. 
{{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 610.163807] env[62740]: DEBUG oslo_concurrency.lockutils [req-d345be63-18b5-49d5-9286-6941dd092d0c req-67d9980b-5a67-4385-aea5-d34533d6dc58 service nova] Acquiring lock "refresh_cache-4f0d1356-bdfb-4cb2-979a-e28f9025b311" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 610.163953] env[62740]: DEBUG oslo_concurrency.lockutils [req-d345be63-18b5-49d5-9286-6941dd092d0c req-67d9980b-5a67-4385-aea5-d34533d6dc58 service nova] Acquired lock "refresh_cache-4f0d1356-bdfb-4cb2-979a-e28f9025b311" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 610.164237] env[62740]: DEBUG nova.network.neutron [req-d345be63-18b5-49d5-9286-6941dd092d0c req-67d9980b-5a67-4385-aea5-d34533d6dc58 service nova] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Refreshing network info cache for port a8ea17f3-420a-465c-9b70-9b279603a92f {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 610.164740] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-d345be63-18b5-49d5-9286-6941dd092d0c req-67d9980b-5a67-4385-aea5-d34533d6dc58 service nova] Expecting reply to msg 192c8d0d96ec43998b4883ee2d8c61d8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 610.178214] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 192c8d0d96ec43998b4883ee2d8c61d8 [ 610.785510] env[62740]: DEBUG nova.network.neutron [req-d345be63-18b5-49d5-9286-6941dd092d0c req-67d9980b-5a67-4385-aea5-d34533d6dc58 service nova] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Updated VIF entry in instance network info cache for port a8ea17f3-420a-465c-9b70-9b279603a92f. 
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 610.785510] env[62740]: DEBUG nova.network.neutron [req-d345be63-18b5-49d5-9286-6941dd092d0c req-67d9980b-5a67-4385-aea5-d34533d6dc58 service nova] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Updating instance_info_cache with network_info: [{"id": "a8ea17f3-420a-465c-9b70-9b279603a92f", "address": "fa:16:3e:c4:b9:14", "network": {"id": "25aa10e1-0c9e-4529-b1d4-ebb51f178344", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-219123173-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a29640051052481dace61e0c2197b229", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "051f343d-ac4f-4070-a26d-467603122c81", "external-id": "nsx-vlan-transportzone-277", "segmentation_id": 277, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8ea17f3-42", "ovs_interfaceid": "a8ea17f3-420a-465c-9b70-9b279603a92f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 610.785670] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-d345be63-18b5-49d5-9286-6941dd092d0c req-67d9980b-5a67-4385-aea5-d34533d6dc58 service nova] Expecting reply to msg e997dbf1778f4083884128cda57fc45c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 610.797606] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e997dbf1778f4083884128cda57fc45c [ 610.798475] env[62740]: DEBUG oslo_concurrency.lockutils [req-d345be63-18b5-49d5-9286-6941dd092d0c req-67d9980b-5a67-4385-aea5-d34533d6dc58 service nova] Releasing lock "refresh_cache-4f0d1356-bdfb-4cb2-979a-e28f9025b311" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 610.799148] env[62740]: DEBUG nova.compute.manager [req-d345be63-18b5-49d5-9286-6941dd092d0c req-67d9980b-5a67-4385-aea5-d34533d6dc58 service nova] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Received event network-vif-plugged-b07df930-5b4a-4eac-857e-90edff87f3a6 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 610.799497] env[62740]: DEBUG oslo_concurrency.lockutils [req-d345be63-18b5-49d5-9286-6941dd092d0c req-67d9980b-5a67-4385-aea5-d34533d6dc58 service nova] Acquiring lock "75050b95-60c6-4e44-a1d5-0d47492dd739-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.799831] env[62740]: DEBUG oslo_concurrency.lockutils [req-d345be63-18b5-49d5-9286-6941dd092d0c req-67d9980b-5a67-4385-aea5-d34533d6dc58 service nova] Lock "75050b95-60c6-4e44-a1d5-0d47492dd739-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 610.800425] env[62740]: DEBUG 
oslo_concurrency.lockutils [req-d345be63-18b5-49d5-9286-6941dd092d0c req-67d9980b-5a67-4385-aea5-d34533d6dc58 service nova] Lock "75050b95-60c6-4e44-a1d5-0d47492dd739-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 610.800726] env[62740]: DEBUG nova.compute.manager [req-d345be63-18b5-49d5-9286-6941dd092d0c req-67d9980b-5a67-4385-aea5-d34533d6dc58 service nova] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] No waiting events found dispatching network-vif-plugged-b07df930-5b4a-4eac-857e-90edff87f3a6 {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 610.801022] env[62740]: WARNING nova.compute.manager [req-d345be63-18b5-49d5-9286-6941dd092d0c req-67d9980b-5a67-4385-aea5-d34533d6dc58 service nova] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Received unexpected event network-vif-plugged-b07df930-5b4a-4eac-857e-90edff87f3a6 for instance with vm_state building and task_state spawning. [ 610.801321] env[62740]: DEBUG nova.compute.manager [req-d345be63-18b5-49d5-9286-6941dd092d0c req-67d9980b-5a67-4385-aea5-d34533d6dc58 service nova] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Received event network-changed-b07df930-5b4a-4eac-857e-90edff87f3a6 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 610.801595] env[62740]: DEBUG nova.compute.manager [req-d345be63-18b5-49d5-9286-6941dd092d0c req-67d9980b-5a67-4385-aea5-d34533d6dc58 service nova] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Refreshing instance network info cache due to event network-changed-b07df930-5b4a-4eac-857e-90edff87f3a6. {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 610.801891] env[62740]: DEBUG oslo_concurrency.lockutils [req-d345be63-18b5-49d5-9286-6941dd092d0c req-67d9980b-5a67-4385-aea5-d34533d6dc58 service nova] Acquiring lock "refresh_cache-75050b95-60c6-4e44-a1d5-0d47492dd739" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 610.802163] env[62740]: DEBUG oslo_concurrency.lockutils [req-d345be63-18b5-49d5-9286-6941dd092d0c req-67d9980b-5a67-4385-aea5-d34533d6dc58 service nova] Acquired lock "refresh_cache-75050b95-60c6-4e44-a1d5-0d47492dd739" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 610.802433] env[62740]: DEBUG nova.network.neutron [req-d345be63-18b5-49d5-9286-6941dd092d0c req-67d9980b-5a67-4385-aea5-d34533d6dc58 service nova] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Refreshing network info cache for port b07df930-5b4a-4eac-857e-90edff87f3a6 {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 610.803030] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-d345be63-18b5-49d5-9286-6941dd092d0c req-67d9980b-5a67-4385-aea5-d34533d6dc58 service nova] Expecting reply to msg 1e9c8b3203dd42608c64856f6a69f9c6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 610.812324] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1e9c8b3203dd42608c64856f6a69f9c6 [ 610.974489] env[62740]: DEBUG nova.network.neutron [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Updating instance_info_cache with network_info: 
[{"id": "55cd9e15-32c1-41e3-8a19-a6a6128fbc87", "address": "fa:16:3e:fa:6d:1d", "network": {"id": "a1bf429f-63e1-4b06-ba31-36e8e686268d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1763096855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "065d149aea7645d7a5e32c0d14ff0936", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55cd9e15-32", "ovs_interfaceid": "55cd9e15-32c1-41e3-8a19-a6a6128fbc87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 610.974489] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 8a81d32c31cf443e90f036f36985f79d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 610.992021] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8a81d32c31cf443e90f036f36985f79d [ 610.992021] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Releasing lock "refresh_cache-8053d2ae-ca61-4282-aa89-83f3a2e107bc" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 610.992273] env[62740]: DEBUG nova.compute.manager [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Instance network_info: |[{"id": "55cd9e15-32c1-41e3-8a19-a6a6128fbc87", "address": "fa:16:3e:fa:6d:1d", "network": {"id": "a1bf429f-63e1-4b06-ba31-36e8e686268d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1763096855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "065d149aea7645d7a5e32c0d14ff0936", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55cd9e15-32", "ovs_interfaceid": "55cd9e15-32c1-41e3-8a19-a6a6128fbc87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 610.992349] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fa:6d:1d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b399c74-1411-408a-b4cd-84e268ae83fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '55cd9e15-32c1-41e3-8a19-a6a6128fbc87', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 611.000958] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Creating folder: Project (065d149aea7645d7a5e32c0d14ff0936). Parent ref: group-v156037. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 611.002465] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-de1088d6-f19d-49e9-934d-7b35703632ea {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.015216] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Created folder: Project (065d149aea7645d7a5e32c0d14ff0936) in parent group-v156037. [ 611.015216] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Creating folder: Instances. Parent ref: group-v156065. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 611.015216] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4d413605-b4d5-4142-b490-e6901428efac {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.022505] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Created folder: Instances in parent group-v156065. [ 611.022592] env[62740]: DEBUG oslo.service.loopingcall [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 611.022774] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 611.022989] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c448331e-d78f-40c4-ad9d-d14e6d903fb6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.044114] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 611.044114] env[62740]: value = "task-640065" [ 611.044114] env[62740]: _type = "Task" [ 611.044114] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.054455] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640065, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.275415] env[62740]: DEBUG nova.network.neutron [req-d345be63-18b5-49d5-9286-6941dd092d0c req-67d9980b-5a67-4385-aea5-d34533d6dc58 service nova] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Updated VIF entry in instance network info cache for port b07df930-5b4a-4eac-857e-90edff87f3a6. {{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 611.275415] env[62740]: DEBUG nova.network.neutron [req-d345be63-18b5-49d5-9286-6941dd092d0c req-67d9980b-5a67-4385-aea5-d34533d6dc58 service nova] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Updating instance_info_cache with network_info: [{"id": "b07df930-5b4a-4eac-857e-90edff87f3a6", "address": "fa:16:3e:85:cd:6f", "network": {"id": "304597cd-4bd9-403c-8f5b-990e2a1efabc", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.94", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "47f0062f3bf04910bbbb3502a2f3ff28", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb07df930-5b", "ovs_interfaceid": "b07df930-5b4a-4eac-857e-90edff87f3a6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 611.275707] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-d345be63-18b5-49d5-9286-6941dd092d0c req-67d9980b-5a67-4385-aea5-d34533d6dc58 service nova] Expecting reply to msg 07e2446567fe45af85f581ae9943b673 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 611.286742] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07e2446567fe45af85f581ae9943b673 [ 611.288205] env[62740]: DEBUG oslo_concurrency.lockutils [req-d345be63-18b5-49d5-9286-6941dd092d0c req-67d9980b-5a67-4385-aea5-d34533d6dc58 service nova] Releasing lock 
"refresh_cache-75050b95-60c6-4e44-a1d5-0d47492dd739" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 611.561921] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640065, 'name': CreateVM_Task, 'duration_secs': 0.366551} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.561921] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 611.562266] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 611.562391] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 611.562747] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 611.563053] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7abc9c77-a143-45b8-97bc-e38c527c53bc {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.569274] env[62740]: DEBUG oslo_vmware.api [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Waiting for the task: (returnval){ [ 611.569274] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52790537-9e2b-8236-471a-0df9e478c730" [ 611.569274] env[62740]: _type = "Task" [ 611.569274] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.580059] env[62740]: DEBUG oslo_vmware.api [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52790537-9e2b-8236-471a-0df9e478c730, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.854935] env[62740]: DEBUG oslo_concurrency.lockutils [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Acquiring lock "4f89b342-0375-48f5-b5cf-713a8d57a182" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 611.854935] env[62740]: DEBUG oslo_concurrency.lockutils [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Lock "4f89b342-0375-48f5-b5cf-713a8d57a182" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 611.922315] env[62740]: DEBUG oslo_concurrency.lockutils [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Acquiring lock "149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 611.922577] env[62740]: DEBUG oslo_concurrency.lockutils [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Lock "149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 612.085212] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 612.085606] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 612.086286] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 612.357542] env[62740]: DEBUG nova.network.neutron [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Successfully created 
port: 51ca1534-f4b4-4041-a119-e853f57f7a51 {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 612.882985] env[62740]: DEBUG nova.compute.manager [req-0acefd2e-a586-4cc7-a612-70d8c8fae880 req-5ef41b9f-c8e2-4c35-8d66-c120fbf1df83 service nova] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Received event network-vif-plugged-55cd9e15-32c1-41e3-8a19-a6a6128fbc87 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 612.883618] env[62740]: DEBUG oslo_concurrency.lockutils [req-0acefd2e-a586-4cc7-a612-70d8c8fae880 req-5ef41b9f-c8e2-4c35-8d66-c120fbf1df83 service nova] Acquiring lock "8053d2ae-ca61-4282-aa89-83f3a2e107bc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 612.883618] env[62740]: DEBUG oslo_concurrency.lockutils [req-0acefd2e-a586-4cc7-a612-70d8c8fae880 req-5ef41b9f-c8e2-4c35-8d66-c120fbf1df83 service nova] Lock "8053d2ae-ca61-4282-aa89-83f3a2e107bc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 612.883618] env[62740]: DEBUG oslo_concurrency.lockutils [req-0acefd2e-a586-4cc7-a612-70d8c8fae880 req-5ef41b9f-c8e2-4c35-8d66-c120fbf1df83 service nova] Lock "8053d2ae-ca61-4282-aa89-83f3a2e107bc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 612.883773] env[62740]: DEBUG nova.compute.manager [req-0acefd2e-a586-4cc7-a612-70d8c8fae880 req-5ef41b9f-c8e2-4c35-8d66-c120fbf1df83 service nova] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] No waiting events found dispatching network-vif-plugged-55cd9e15-32c1-41e3-8a19-a6a6128fbc87 {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 612.883918] env[62740]: WARNING nova.compute.manager [req-0acefd2e-a586-4cc7-a612-70d8c8fae880 req-5ef41b9f-c8e2-4c35-8d66-c120fbf1df83 service nova] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Received unexpected event network-vif-plugged-55cd9e15-32c1-41e3-8a19-a6a6128fbc87 for instance with vm_state building and task_state spawning. 
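The block above shows Nova's external-event plumbing at work: a per-instance "-events" lock is taken inside InstanceEvents.pop_instance_event, no registered waiter is found for network-vif-plugged-55cd9e15-32c1-41e3-8a19-a6a6128fbc87, and the event is logged as unexpected and dropped. That WARNING is typically harmless during spawn: Neutron plugged the VIF before the compute thread reached the point where it registers a waiter and blocks. A minimal, illustrative sketch of the waiter pattern follows; this is a toy stand-in, not the real InstanceEvents class in nova/compute/manager.py, and the class and helper names are invented for the example:

    import threading

    from oslo_concurrency import lockutils

    class ToyInstanceEvents(object):
        """Simplified registry mirroring the lock triplets in the trace."""

        def __init__(self):
            self._events = {}  # instance uuid -> {event name: threading.Event}

        def prepare(self, instance_uuid, event_name):
            # Interest must be registered *before* the external event arrives.
            with lockutils.lock(instance_uuid + '-events'):
                waiter = threading.Event()
                self._events.setdefault(instance_uuid, {})[event_name] = waiter
            return waiter

        def pop_instance_event(self, instance_uuid, event_name):
            # Mirrors the acquire/release pairs on "<uuid>-events" seen above.
            with lockutils.lock(instance_uuid + '-events'):
                return self._events.get(instance_uuid, {}).pop(event_name, None)

    registry = ToyInstanceEvents()

    def external_instance_event(instance_uuid, event_name):
        waiter = registry.pop_instance_event(instance_uuid, event_name)
        if waiter is None:
            # "No waiting events found dispatching ..." -> the WARNING lines.
            print('Received unexpected event %s' % event_name)
        else:
            waiter.set()  # wakes whoever blocked in waiter.wait(timeout)

A spawn that wants to synchronize calls prepare() first and then waiter.wait(timeout); an event that beats the prepare() call produces the "unexpected event ... vm_state building and task_state spawning" case recorded here.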
[ 613.354894] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Acquiring lock "fedb62e0-2602-4772-9e5d-00645922d2a8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 613.355057] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Lock "fedb62e0-2602-4772-9e5d-00645922d2a8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 614.169425] env[62740]: DEBUG nova.network.neutron [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Successfully updated port: 51ca1534-f4b4-4041-a119-e853f57f7a51 {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 614.169425] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Expecting reply to msg 52e12ee056f244ad8dc504cb61bee717 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 614.184758] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 52e12ee056f244ad8dc504cb61bee717 [ 614.185661] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Acquiring lock "refresh_cache-a24df1e4-2865-4ab3-beae-0892dca12bef" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 614.185961] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Acquired lock "refresh_cache-a24df1e4-2865-4ab3-beae-0892dca12bef" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 614.186067] env[62740]: DEBUG nova.network.neutron [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 614.186624] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Expecting reply to msg baec0a7ce1a940dc85a985d57613e196 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 614.194591] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg baec0a7ce1a940dc85a985d57613e196 [ 614.312142] env[62740]: DEBUG nova.network.neutron [None 
req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 614.343380] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Acquiring lock "f98589dc-ea7a-44c8-8cca-119d126ea0de" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 614.344570] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Lock "f98589dc-ea7a-44c8-8cca-119d126ea0de" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 614.790535] env[62740]: DEBUG nova.network.neutron [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Updating instance_info_cache with network_info: [{"id": "51ca1534-f4b4-4041-a119-e853f57f7a51", "address": "fa:16:3e:c4:aa:3f", "network": {"id": "6e6c9813-7402-4b22-b4a0-f5bb19c2e252", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1448824178-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6ec2bd0781548ea9620d349be9b9ff2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b6c45fd-e930-495a-9cb7-df84eda443b1", "external-id": "nsx-vlan-transportzone-407", "segmentation_id": 407, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51ca1534-f4", "ovs_interfaceid": "51ca1534-f4b4-4041-a119-e853f57f7a51", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 614.795410] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Expecting reply to msg 5f5766cac1e147e1ad4edbcf7e32653d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 614.813383] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5f5766cac1e147e1ad4edbcf7e32653d [ 614.814083] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] 
Releasing lock "refresh_cache-a24df1e4-2865-4ab3-beae-0892dca12bef" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 614.815068] env[62740]: DEBUG nova.compute.manager [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Instance network_info: |[{"id": "51ca1534-f4b4-4041-a119-e853f57f7a51", "address": "fa:16:3e:c4:aa:3f", "network": {"id": "6e6c9813-7402-4b22-b4a0-f5bb19c2e252", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1448824178-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6ec2bd0781548ea9620d349be9b9ff2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b6c45fd-e930-495a-9cb7-df84eda443b1", "external-id": "nsx-vlan-transportzone-407", "segmentation_id": 407, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51ca1534-f4", "ovs_interfaceid": "51ca1534-f4b4-4041-a119-e853f57f7a51", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 614.815228] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c4:aa:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2b6c45fd-e930-495a-9cb7-df84eda443b1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '51ca1534-f4b4-4041-a119-e853f57f7a51', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 614.828261] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Creating folder: Project (f6ec2bd0781548ea9620d349be9b9ff2). Parent ref: group-v156037. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 614.831177] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ede81125-10f0-45bd-9371-7075445f20fa {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.842513] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Created folder: Project (f6ec2bd0781548ea9620d349be9b9ff2) in parent group-v156037. 
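Each Folder.CreateFolder / Folder.CreateVM_Task invocation above is one oslo.vmware request/poll cycle: the driver issues a SOAP call through the session, synchronous calls (CreateFolder) return a managed-object reference directly, and asynchronous *_Task calls return a Task reference that wait_for_task polls until completion (the "progress is 0%" followed by "completed successfully" lines). A hedged sketch of that cycle, assuming a placeholder endpoint and credentials and a config spec/resource pool built elsewhere (VMwareAPISession keyword names may vary across oslo.vmware releases):

    from oslo_vmware import api as vmware_api

    # Placeholder host and credentials; in this trace they come from nova.conf.
    session = vmware_api.VMwareAPISession(
        'vc1.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    root = session.vim.service_content.rootFolder

    # Synchronous call: returns the new folder's moref immediately.
    folder = session.invoke_api(session.vim, 'CreateFolder', root,
                                name='Instances')

    # Asynchronous call: returns a Task moref (the "task-640070" value above);
    # wait_for_task polls it and raises if the task ends in error.
    # config_spec and pool_ref are assumed to have been built elsewhere.
    task = session.invoke_api(session.vim, 'CreateVM_Task', folder,
                              config=config_spec, pool=pool_ref)
    task_info = session.wait_for_task(task)

The surrounding "[datastore2] devstack-image-cache_base/..." lock and external-semaphore lines use the same oslo_concurrency.lockutils primitives as the event registry, which is why acquire/release pairs bracket every datastore search in the trace.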
[ 614.842933] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Creating folder: Instances. Parent ref: group-v156068. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 614.843035] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-df6dff05-e033-416c-8d84-492f3a7cec92 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.855245] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Created folder: Instances in parent group-v156068. [ 614.855489] env[62740]: DEBUG oslo.service.loopingcall [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 614.855673] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 614.855900] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-efa02ec8-e99d-433c-b16a-dd0bf7a55da5 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.878024] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 614.878024] env[62740]: value = "task-640070" [ 614.878024] env[62740]: _type = "Task" [ 614.878024] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.885892] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640070, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.390886] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640070, 'name': CreateVM_Task, 'duration_secs': 0.384668} completed successfully. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.391350] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 615.394046] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 615.394228] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.394543] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 615.394902] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b63a9aac-e377-4512-a5f3-92e3502d1c7d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.400372] env[62740]: DEBUG oslo_vmware.api [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Waiting for the task: (returnval){ [ 615.400372] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52130f5e-2fed-ae9c-fad4-63f426981e04" [ 615.400372] env[62740]: _type = "Task" [ 615.400372] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.415436] env[62740]: DEBUG oslo_vmware.api [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52130f5e-2fed-ae9c-fad4-63f426981e04, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.485290] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Acquiring lock "fa5248d1-bddf-4244-a363-2113b0473980" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 615.485540] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Lock "fa5248d1-bddf-4244-a363-2113b0473980" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 615.918594] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 615.918803] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 615.919010] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 616.998252] env[62740]: DEBUG nova.compute.manager [req-6c0d8ad9-40b7-4189-9c62-82897390fb69 req-72c92b53-9b0e-478f-a0e2-9142d5069658 service nova] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Received event network-changed-55cd9e15-32c1-41e3-8a19-a6a6128fbc87 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 616.998543] env[62740]: DEBUG nova.compute.manager [req-6c0d8ad9-40b7-4189-9c62-82897390fb69 req-72c92b53-9b0e-478f-a0e2-9142d5069658 service nova] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Refreshing instance network info cache due to event network-changed-55cd9e15-32c1-41e3-8a19-a6a6128fbc87. 
{{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 616.998668] env[62740]: DEBUG oslo_concurrency.lockutils [req-6c0d8ad9-40b7-4189-9c62-82897390fb69 req-72c92b53-9b0e-478f-a0e2-9142d5069658 service nova] Acquiring lock "refresh_cache-8053d2ae-ca61-4282-aa89-83f3a2e107bc" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 616.998813] env[62740]: DEBUG oslo_concurrency.lockutils [req-6c0d8ad9-40b7-4189-9c62-82897390fb69 req-72c92b53-9b0e-478f-a0e2-9142d5069658 service nova] Acquired lock "refresh_cache-8053d2ae-ca61-4282-aa89-83f3a2e107bc" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 616.999014] env[62740]: DEBUG nova.network.neutron [req-6c0d8ad9-40b7-4189-9c62-82897390fb69 req-72c92b53-9b0e-478f-a0e2-9142d5069658 service nova] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Refreshing network info cache for port 55cd9e15-32c1-41e3-8a19-a6a6128fbc87 {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 616.999496] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-6c0d8ad9-40b7-4189-9c62-82897390fb69 req-72c92b53-9b0e-478f-a0e2-9142d5069658 service nova] Expecting reply to msg be35dfef75de47c388eb04371f317f7c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 617.013574] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be35dfef75de47c388eb04371f317f7c [ 617.514974] env[62740]: DEBUG nova.network.neutron [req-6c0d8ad9-40b7-4189-9c62-82897390fb69 req-72c92b53-9b0e-478f-a0e2-9142d5069658 service nova] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Updated VIF entry in instance network info cache for port 55cd9e15-32c1-41e3-8a19-a6a6128fbc87. 
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 617.515991] env[62740]: DEBUG nova.network.neutron [req-6c0d8ad9-40b7-4189-9c62-82897390fb69 req-72c92b53-9b0e-478f-a0e2-9142d5069658 service nova] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Updating instance_info_cache with network_info: [{"id": "55cd9e15-32c1-41e3-8a19-a6a6128fbc87", "address": "fa:16:3e:fa:6d:1d", "network": {"id": "a1bf429f-63e1-4b06-ba31-36e8e686268d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1763096855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "065d149aea7645d7a5e32c0d14ff0936", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55cd9e15-32", "ovs_interfaceid": "55cd9e15-32c1-41e3-8a19-a6a6128fbc87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 617.517433] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-6c0d8ad9-40b7-4189-9c62-82897390fb69 req-72c92b53-9b0e-478f-a0e2-9142d5069658 service nova] Expecting reply to msg aea175f738ec4da3955f645c0f62ba8a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 617.528732] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aea175f738ec4da3955f645c0f62ba8a [ 617.529571] env[62740]: DEBUG oslo_concurrency.lockutils [req-6c0d8ad9-40b7-4189-9c62-82897390fb69 req-72c92b53-9b0e-478f-a0e2-9142d5069658 service nova] Releasing lock "refresh_cache-8053d2ae-ca61-4282-aa89-83f3a2e107bc" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 617.529684] env[62740]: DEBUG nova.compute.manager [req-6c0d8ad9-40b7-4189-9c62-82897390fb69 req-72c92b53-9b0e-478f-a0e2-9142d5069658 service nova] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Received event network-vif-plugged-51ca1534-f4b4-4041-a119-e853f57f7a51 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 617.529903] env[62740]: DEBUG oslo_concurrency.lockutils [req-6c0d8ad9-40b7-4189-9c62-82897390fb69 req-72c92b53-9b0e-478f-a0e2-9142d5069658 service nova] Acquiring lock "a24df1e4-2865-4ab3-beae-0892dca12bef-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 617.530266] env[62740]: DEBUG oslo_concurrency.lockutils [req-6c0d8ad9-40b7-4189-9c62-82897390fb69 req-72c92b53-9b0e-478f-a0e2-9142d5069658 service nova] Lock "a24df1e4-2865-4ab3-beae-0892dca12bef-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 617.530479] env[62740]: DEBUG 
oslo_concurrency.lockutils [req-6c0d8ad9-40b7-4189-9c62-82897390fb69 req-72c92b53-9b0e-478f-a0e2-9142d5069658 service nova] Lock "a24df1e4-2865-4ab3-beae-0892dca12bef-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 617.530736] env[62740]: DEBUG nova.compute.manager [req-6c0d8ad9-40b7-4189-9c62-82897390fb69 req-72c92b53-9b0e-478f-a0e2-9142d5069658 service nova] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] No waiting events found dispatching network-vif-plugged-51ca1534-f4b4-4041-a119-e853f57f7a51 {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 617.530944] env[62740]: WARNING nova.compute.manager [req-6c0d8ad9-40b7-4189-9c62-82897390fb69 req-72c92b53-9b0e-478f-a0e2-9142d5069658 service nova] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Received unexpected event network-vif-plugged-51ca1534-f4b4-4041-a119-e853f57f7a51 for instance with vm_state building and task_state spawning. [ 617.531139] env[62740]: DEBUG nova.compute.manager [req-6c0d8ad9-40b7-4189-9c62-82897390fb69 req-72c92b53-9b0e-478f-a0e2-9142d5069658 service nova] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Received event network-changed-51ca1534-f4b4-4041-a119-e853f57f7a51 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 617.531343] env[62740]: DEBUG nova.compute.manager [req-6c0d8ad9-40b7-4189-9c62-82897390fb69 req-72c92b53-9b0e-478f-a0e2-9142d5069658 service nova] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Refreshing instance network info cache due to event network-changed-51ca1534-f4b4-4041-a119-e853f57f7a51. {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 617.533070] env[62740]: DEBUG oslo_concurrency.lockutils [req-6c0d8ad9-40b7-4189-9c62-82897390fb69 req-72c92b53-9b0e-478f-a0e2-9142d5069658 service nova] Acquiring lock "refresh_cache-a24df1e4-2865-4ab3-beae-0892dca12bef" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 617.533070] env[62740]: DEBUG oslo_concurrency.lockutils [req-6c0d8ad9-40b7-4189-9c62-82897390fb69 req-72c92b53-9b0e-478f-a0e2-9142d5069658 service nova] Acquired lock "refresh_cache-a24df1e4-2865-4ab3-beae-0892dca12bef" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.533070] env[62740]: DEBUG nova.network.neutron [req-6c0d8ad9-40b7-4189-9c62-82897390fb69 req-72c92b53-9b0e-478f-a0e2-9142d5069658 service nova] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Refreshing network info cache for port 51ca1534-f4b4-4041-a119-e853f57f7a51 {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 617.533070] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-6c0d8ad9-40b7-4189-9c62-82897390fb69 req-72c92b53-9b0e-478f-a0e2-9142d5069658 service nova] Expecting reply to msg 1bab984107e244c7bd18b088d4df8f14 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 617.542798] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1bab984107e244c7bd18b088d4df8f14 [ 617.623884] env[62740]: DEBUG oslo_concurrency.lockutils [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Acquiring lock "6ec38a6c-f4b2-42ce-b371-5fe82d577545" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 617.624139] env[62740]: DEBUG oslo_concurrency.lockutils [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Lock "6ec38a6c-f4b2-42ce-b371-5fe82d577545" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 618.029890] env[62740]: DEBUG nova.network.neutron [req-6c0d8ad9-40b7-4189-9c62-82897390fb69 req-72c92b53-9b0e-478f-a0e2-9142d5069658 service nova] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Updated VIF entry in instance network info cache for port 51ca1534-f4b4-4041-a119-e853f57f7a51. {{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 618.030276] env[62740]: DEBUG nova.network.neutron [req-6c0d8ad9-40b7-4189-9c62-82897390fb69 req-72c92b53-9b0e-478f-a0e2-9142d5069658 service nova] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Updating instance_info_cache with network_info: [{"id": "51ca1534-f4b4-4041-a119-e853f57f7a51", "address": "fa:16:3e:c4:aa:3f", "network": {"id": "6e6c9813-7402-4b22-b4a0-f5bb19c2e252", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1448824178-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6ec2bd0781548ea9620d349be9b9ff2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b6c45fd-e930-495a-9cb7-df84eda443b1", "external-id": "nsx-vlan-transportzone-407", "segmentation_id": 407, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51ca1534-f4", "ovs_interfaceid": "51ca1534-f4b4-4041-a119-e853f57f7a51", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 618.030802] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-6c0d8ad9-40b7-4189-9c62-82897390fb69 req-72c92b53-9b0e-478f-a0e2-9142d5069658 service nova] Expecting reply to msg 8e91837a8c5e48b78e62ffb42c1d53e7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 618.069023] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e91837a8c5e48b78e62ffb42c1d53e7 [ 618.069791] env[62740]: DEBUG oslo_concurrency.lockutils [req-6c0d8ad9-40b7-4189-9c62-82897390fb69 req-72c92b53-9b0e-478f-a0e2-9142d5069658 service nova] Releasing lock "refresh_cache-a24df1e4-2865-4ab3-beae-0892dca12bef" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 619.422468] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Acquiring lock 
"26712c18-d9f4-4d7d-80fb-4d527da9c1e3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 619.424453] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Lock "26712c18-d9f4-4d7d-80fb-4d527da9c1e3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 619.507776] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8c680f70-93a6-403b-91ca-5339f8edd931 tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Acquiring lock "ab15259f-6344-4ba0-9abd-8b0ee7df59fa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 619.508015] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8c680f70-93a6-403b-91ca-5339f8edd931 tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Lock "ab15259f-6344-4ba0-9abd-8b0ee7df59fa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 620.809746] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ac2117ed-d5eb-4992-88e2-8be9df8b68a2 tempest-ServersWithSpecificFlavorTestJSON-23875077 tempest-ServersWithSpecificFlavorTestJSON-23875077-project-member] Acquiring lock "425930c0-b9f8-4966-ae9d-0687d0a07213" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 620.810112] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ac2117ed-d5eb-4992-88e2-8be9df8b68a2 tempest-ServersWithSpecificFlavorTestJSON-23875077 tempest-ServersWithSpecificFlavorTestJSON-23875077-project-member] Lock "425930c0-b9f8-4966-ae9d-0687d0a07213" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.609453] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e5903377-dce5-4298-b3cf-664569c65ee7 tempest-ServersAdmin275Test-2081354469 tempest-ServersAdmin275Test-2081354469-project-member] Acquiring lock "b2ec3212-25e1-4027-801d-a23309a4d0e6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 628.609773] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e5903377-dce5-4298-b3cf-664569c65ee7 tempest-ServersAdmin275Test-2081354469 tempest-ServersAdmin275Test-2081354469-project-member] Lock "b2ec3212-25e1-4027-801d-a23309a4d0e6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 629.779762] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7591c0a2-ff4f-4ef4-8115-8fa10d00afd0 tempest-ServersTestBootFromVolume-182780058 tempest-ServersTestBootFromVolume-182780058-project-member] Acquiring lock "1ae43e6d-c9ac-494d-a7a9-1f6ff538345a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 629.780127] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7591c0a2-ff4f-4ef4-8115-8fa10d00afd0 tempest-ServersTestBootFromVolume-182780058 tempest-ServersTestBootFromVolume-182780058-project-member] Lock "1ae43e6d-c9ac-494d-a7a9-1f6ff538345a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 631.017882] env[62740]: DEBUG oslo_concurrency.lockutils [None req-0f41c272-e058-4a9b-8675-d44257db4ff9 tempest-ServerGroupTestJSON-1334565079 tempest-ServerGroupTestJSON-1334565079-project-member] Acquiring lock "d490ad64-a2fe-4c08-b0fc-56b2e00d9c98" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 631.017882] env[62740]: DEBUG oslo_concurrency.lockutils [None req-0f41c272-e058-4a9b-8675-d44257db4ff9 tempest-ServerGroupTestJSON-1334565079 tempest-ServerGroupTestJSON-1334565079-project-member] Lock "d490ad64-a2fe-4c08-b0fc-56b2e00d9c98" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 631.250581] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9d71d3e1-b8cc-41df-9553-0a1ccd9fd110 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Acquiring lock "f7a74539-8a36-450f-aec4-d059670e8f38" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 631.250581] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9d71d3e1-b8cc-41df-9553-0a1ccd9fd110 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Lock "f7a74539-8a36-450f-aec4-d059670e8f38" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 635.058932] env[62740]: DEBUG oslo_concurrency.lockutils [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquiring lock "e473e254-387f-4581-97bc-bdeab221b10f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 635.058932] env[62740]: DEBUG oslo_concurrency.lockutils [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 
tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "e473e254-387f-4581-97bc-bdeab221b10f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 636.494362] env[62740]: WARNING oslo_vmware.rw_handles [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 636.494362] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 636.494362] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 636.494362] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 636.494362] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 636.494362] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 636.494362] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 636.494362] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 636.494362] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 636.494362] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 636.494362] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 636.494362] env[62740]: ERROR oslo_vmware.rw_handles [ 636.495412] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/e6f0bcf4-49dd-4bf5-a75c-d3fe5842137c/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 636.496759] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 636.497053] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Copying Virtual Disk [datastore2] vmware_temp/e6f0bcf4-49dd-4bf5-a75c-d3fe5842137c/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore2] vmware_temp/e6f0bcf4-49dd-4bf5-a75c-d3fe5842137c/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 636.497572] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a06fd10a-511e-4f97-b859-27fb460c4b56 {{(pid=62740) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.505760] env[62740]: DEBUG oslo_vmware.api [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Waiting for the task: (returnval){ [ 636.505760] env[62740]: value = "task-640076" [ 636.505760] env[62740]: _type = "Task" [ 636.505760] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.514569] env[62740]: DEBUG oslo_vmware.api [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Task: {'id': task-640076, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.022150] env[62740]: DEBUG oslo_vmware.exceptions [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Fault InvalidArgument not matched. {{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 637.022323] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 637.023502] env[62740]: ERROR nova.compute.manager [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 637.023502] env[62740]: Faults: ['InvalidArgument'] [ 637.023502] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Traceback (most recent call last): [ 637.023502] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 637.023502] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] yield resources [ 637.023502] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 637.023502] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] self.driver.spawn(context, instance, image_meta, [ 637.023502] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 637.023502] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 637.023502] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 637.023502] env[62740]: ERROR nova.compute.manager [instance: 
53f7fc38-a541-4843-883d-da7311445fe5] self._fetch_image_if_missing(context, vi) [ 637.023502] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 637.024154] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] image_cache(vi, tmp_image_ds_loc) [ 637.024154] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 637.024154] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] vm_util.copy_virtual_disk( [ 637.024154] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 637.024154] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] session._wait_for_task(vmdk_copy_task) [ 637.024154] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 637.024154] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] return self.wait_for_task(task_ref) [ 637.024154] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 637.024154] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] return evt.wait() [ 637.024154] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 637.024154] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] result = hub.switch() [ 637.024154] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 637.024154] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] return self.greenlet.switch() [ 637.024732] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 637.024732] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] self.f(*self.args, **self.kw) [ 637.024732] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 637.024732] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] raise exceptions.translate_fault(task_info.error) [ 637.024732] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 637.024732] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Faults: ['InvalidArgument'] [ 637.024732] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] [ 637.024732] env[62740]: INFO nova.compute.manager [None 
req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Terminating instance [ 637.026206] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 637.026401] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 637.026642] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f064fed9-2eeb-47fd-803c-b106e84159e1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.029599] env[62740]: DEBUG nova.compute.manager [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 637.029956] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 637.031338] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c099220-1251-4cd8-ab3b-c1f721504b28 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.039710] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 637.041144] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fc90f1fa-b30e-43b7-a87c-d19cb323fec3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.043337] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 637.043536] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 637.044672] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-797f132d-6606-459b-8c0b-549cf7a8479b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.050613] env[62740]: DEBUG oslo_vmware.api [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Waiting for the task: (returnval){ [ 637.050613] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52d07558-192c-f8f1-4264-e49bc6be2536" [ 637.050613] env[62740]: _type = "Task" [ 637.050613] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.058661] env[62740]: DEBUG oslo_vmware.api [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52d07558-192c-f8f1-4264-e49bc6be2536, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.080870] env[62740]: DEBUG oslo_concurrency.lockutils [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Acquiring lock "68aa9321-22ce-45a0-8323-fa8564dca46b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 637.081130] env[62740]: DEBUG oslo_concurrency.lockutils [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Lock "68aa9321-22ce-45a0-8323-fa8564dca46b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 637.115590] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 637.115590] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 637.115590] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Deleting the datastore file [datastore2] 53f7fc38-a541-4843-883d-da7311445fe5 {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 637.115590] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-fa5d5b39-7088-4ae8-8e22-fbc0f384a1c5 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.124980] env[62740]: DEBUG oslo_vmware.api [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Waiting for the task: (returnval){ [ 637.124980] env[62740]: value = "task-640078" [ 637.124980] env[62740]: _type = "Task" [ 637.124980] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.135879] env[62740]: DEBUG oslo_vmware.api [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Task: {'id': task-640078, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.223694] env[62740]: DEBUG oslo_concurrency.lockutils [None req-95318742-84af-4fb6-ad3f-3c5466ba567e tempest-TenantUsagesTestJSON-2045170971 tempest-TenantUsagesTestJSON-2045170971-project-member] Acquiring lock "4ec9a397-1e4b-4767-b926-ccc6f63a951c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 637.224069] env[62740]: DEBUG oslo_concurrency.lockutils [None req-95318742-84af-4fb6-ad3f-3c5466ba567e tempest-TenantUsagesTestJSON-2045170971 tempest-TenantUsagesTestJSON-2045170971-project-member] Lock "4ec9a397-1e4b-4767-b926-ccc6f63a951c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 637.561925] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 637.562203] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Creating directory with path [datastore2] vmware_temp/861adc22-db47-479c-835a-476fff7ffcfb/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 637.562439] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-326a09ce-24b3-4bec-a462-774e2eaeed3c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.575997] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Created directory with path [datastore2] vmware_temp/861adc22-db47-479c-835a-476fff7ffcfb/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 637.576218] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 
tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Fetch image to [datastore2] vmware_temp/861adc22-db47-479c-835a-476fff7ffcfb/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 637.576387] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/861adc22-db47-479c-835a-476fff7ffcfb/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 637.577181] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c71bbe74-8faa-4a4d-97f0-3685ee55fcd8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.585703] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb44d91c-6647-4856-82e9-f1ec045bbdbb {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.597412] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bc9316d-e669-41d5-9ccd-24eed26cad03 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.637680] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c2031ab-d4a7-4b4d-bff9-5ce40bf32f31 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.649362] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ecd46d19-f6d2-4087-8420-797b4f6ea4ee {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.652198] env[62740]: DEBUG oslo_vmware.api [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Task: {'id': task-640078, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.068366} completed successfully. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.652517] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 637.652919] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 637.653280] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 637.653417] env[62740]: INFO nova.compute.manager [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Took 0.62 seconds to destroy the instance on the hypervisor. [ 637.656602] env[62740]: DEBUG nova.compute.claims [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 637.656771] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 637.657022] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 637.659281] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Expecting reply to msg b946a67718584a2e98088d6ac10b11fe in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 637.679355] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 637.700076] env[62740]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b946a67718584a2e98088d6ac10b11fe [ 637.752131] env[62740]: DEBUG oslo_vmware.rw_handles [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/861adc22-db47-479c-835a-476fff7ffcfb/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 637.819252] env[62740]: DEBUG oslo_vmware.rw_handles [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Completed reading data from the image iterator. {{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 637.819562] env[62740]: DEBUG oslo_vmware.rw_handles [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/861adc22-db47-479c-835a-476fff7ffcfb/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 638.127895] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-223ceeaf-2983-40ad-8ed2-805d40042f16 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.136393] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9ddd5ea-624f-4159-b16a-0bd7846801cb {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.166557] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75605b5c-4fd0-4414-ba3b-191bd9d9281a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.174490] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-564ed485-dac5-4e47-b61e-1e6310fac810 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.188921] env[62740]: DEBUG nova.compute.provider_tree [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 638.193019] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Expecting reply to msg eee920fdb2884b81841b547d0fea52ac in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 638.200896] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eee920fdb2884b81841b547d0fea52ac [ 638.202042] env[62740]: 
DEBUG nova.scheduler.client.report [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 638.205157] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Expecting reply to msg 3fead88cdb544fbbb05a41051ec0db20 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 638.222434] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3fead88cdb544fbbb05a41051ec0db20 [ 638.224094] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.566s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 638.224094] env[62740]: ERROR nova.compute.manager [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 638.224094] env[62740]: Faults: ['InvalidArgument'] [ 638.224094] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Traceback (most recent call last): [ 638.224094] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 638.224094] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] self.driver.spawn(context, instance, image_meta, [ 638.224094] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 638.224094] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 638.224094] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 638.224094] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] self._fetch_image_if_missing(context, vi) [ 638.224552] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 638.224552] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] image_cache(vi, 
tmp_image_ds_loc) [ 638.224552] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 638.224552] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] vm_util.copy_virtual_disk( [ 638.224552] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 638.224552] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] session._wait_for_task(vmdk_copy_task) [ 638.224552] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 638.224552] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] return self.wait_for_task(task_ref) [ 638.224552] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 638.224552] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] return evt.wait() [ 638.224552] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 638.224552] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] result = hub.switch() [ 638.224552] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 638.224920] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] return self.greenlet.switch() [ 638.224920] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 638.224920] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] self.f(*self.args, **self.kw) [ 638.224920] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 638.224920] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] raise exceptions.translate_fault(task_info.error) [ 638.224920] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 638.224920] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Faults: ['InvalidArgument'] [ 638.224920] env[62740]: ERROR nova.compute.manager [instance: 53f7fc38-a541-4843-883d-da7311445fe5] [ 638.224920] env[62740]: DEBUG nova.compute.utils [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 638.226272] env[62740]: DEBUG nova.compute.manager [None 
req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Build of instance 53f7fc38-a541-4843-883d-da7311445fe5 was re-scheduled: A specified parameter was not correct: fileType [ 638.226272] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 638.226442] env[62740]: DEBUG nova.compute.manager [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 638.226619] env[62740]: DEBUG nova.compute.manager [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 638.226795] env[62740]: DEBUG nova.compute.manager [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 638.226959] env[62740]: DEBUG nova.network.neutron [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 638.710963] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Expecting reply to msg d3d53efb7c7f4faba28ec76aa026fcf3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 638.727513] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d3d53efb7c7f4faba28ec76aa026fcf3 [ 638.727513] env[62740]: DEBUG nova.network.neutron [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 638.728203] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Expecting reply to msg 4759ebb832ff4f198b343aff9168a810 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 638.750691] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4759ebb832ff4f198b343aff9168a810 [ 638.751426] env[62740]: INFO nova.compute.manager [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 
tempest-FloatingIPsAssociationTestJSON-827292912-project-member] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] Took 0.52 seconds to deallocate network for instance. [ 638.753102] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Expecting reply to msg d33405e51ad84be287ac0b8c5d5fecb6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 638.797268] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d33405e51ad84be287ac0b8c5d5fecb6 [ 638.800834] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Expecting reply to msg 2b6b972ca5ea41c589f70ac220312f44 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 638.841080] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b6b972ca5ea41c589f70ac220312f44 [ 638.859210] env[62740]: DEBUG oslo_concurrency.lockutils [None req-27895476-da6b-47f1-82c4-8733c058c222 tempest-MultipleCreateTestJSON-375945429 tempest-MultipleCreateTestJSON-375945429-project-member] Acquiring lock "e1c7a748-b3f3-41b7-8784-13699549a01d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 638.859725] env[62740]: DEBUG oslo_concurrency.lockutils [None req-27895476-da6b-47f1-82c4-8733c058c222 tempest-MultipleCreateTestJSON-375945429 tempest-MultipleCreateTestJSON-375945429-project-member] Lock "e1c7a748-b3f3-41b7-8784-13699549a01d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 638.889284] env[62740]: INFO nova.scheduler.client.report [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Deleted allocations for instance 53f7fc38-a541-4843-883d-da7311445fe5 [ 638.893999] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Expecting reply to msg 2610c92cb9564dd68e4a51941cf052c7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 638.897708] env[62740]: DEBUG oslo_concurrency.lockutils [None req-27895476-da6b-47f1-82c4-8733c058c222 tempest-MultipleCreateTestJSON-375945429 tempest-MultipleCreateTestJSON-375945429-project-member] Acquiring lock "25cef75e-2176-4999-965b-155cd7f8d137" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 638.898478] env[62740]: DEBUG oslo_concurrency.lockutils [None req-27895476-da6b-47f1-82c4-8733c058c222 tempest-MultipleCreateTestJSON-375945429 tempest-MultipleCreateTestJSON-375945429-project-member] Lock "25cef75e-2176-4999-965b-155cd7f8d137" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62740) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 638.904479] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2610c92cb9564dd68e4a51941cf052c7 [ 638.907197] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dfc153cf-6b43-410f-b5aa-fd688df2985a tempest-FloatingIPsAssociationTestJSON-827292912 tempest-FloatingIPsAssociationTestJSON-827292912-project-member] Lock "53f7fc38-a541-4843-883d-da7311445fe5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.070s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 638.907197] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Expecting reply to msg 53d6c2bf16b949dab2a40e08b0eec9b1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 638.907197] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "53f7fc38-a541-4843-883d-da7311445fe5" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 56.673s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 638.907197] env[62740]: INFO nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 53f7fc38-a541-4843-883d-da7311445fe5] During sync_power_state the instance has a pending task (spawning). Skip. [ 638.907521] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "53f7fc38-a541-4843-883d-da7311445fe5" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 638.922571] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 53d6c2bf16b949dab2a40e08b0eec9b1 [ 638.923414] env[62740]: DEBUG nova.compute.manager [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Starting instance... 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 638.926949] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Expecting reply to msg 5f3cb27c1ed54d6f9d983647a2422b5c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 638.962347] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5f3cb27c1ed54d6f9d983647a2422b5c [ 638.985228] env[62740]: DEBUG oslo_concurrency.lockutils [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 638.985322] env[62740]: DEBUG oslo_concurrency.lockutils [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 638.986940] env[62740]: INFO nova.compute.claims [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 638.991529] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Expecting reply to msg 03d21f7311bf4388a7b29bca71e768e0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 639.031699] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 03d21f7311bf4388a7b29bca71e768e0 [ 639.033473] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Expecting reply to msg 4d783eb150f64b4faeba3ddcacdf82e5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 639.040751] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4d783eb150f64b4faeba3ddcacdf82e5 [ 639.467540] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75099a00-1e7a-48e3-a623-ea03fe4b5f89 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.479479] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c9c0512-d271-451d-a808-735b0bbb88d1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.528606] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51692abe-db9b-460d-8d3b-cfb15842b429 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.539746] env[62740]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffc22545-2dac-4b07-884a-77fd91a630e4 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.562746] env[62740]: DEBUG nova.compute.provider_tree [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 639.563577] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Expecting reply to msg 291a9e964e7544fbaa84af1eabbd946c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 639.576253] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 291a9e964e7544fbaa84af1eabbd946c [ 639.577620] env[62740]: DEBUG nova.scheduler.client.report [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 639.581688] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Expecting reply to msg 07a40607a87b425caec63d87052adeb1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 639.600513] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07a40607a87b425caec63d87052adeb1 [ 639.602106] env[62740]: DEBUG oslo_concurrency.lockutils [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.617s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 639.602832] env[62740]: DEBUG nova.compute.manager [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Start building networks asynchronously for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 639.605626] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Expecting reply to msg 510aa706ec5e493daf594f8f17f84b7c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 639.646138] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 510aa706ec5e493daf594f8f17f84b7c [ 639.647725] env[62740]: DEBUG nova.compute.utils [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 639.651022] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Expecting reply to msg 4aef3d5cc8504a13b4e07f1b82d10640 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 639.651022] env[62740]: DEBUG nova.compute.manager [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 639.651022] env[62740]: DEBUG nova.network.neutron [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 639.659403] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4aef3d5cc8504a13b4e07f1b82d10640 [ 639.661606] env[62740]: DEBUG nova.compute.manager [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 639.661772] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Expecting reply to msg ac6542574c8c4bb2a85179143ff60fe3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 639.695164] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac6542574c8c4bb2a85179143ff60fe3 [ 639.698450] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Expecting reply to msg 0b7344ce64a64e209331f9e2afb34479 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 639.739077] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0b7344ce64a64e209331f9e2afb34479 [ 639.740450] env[62740]: DEBUG nova.compute.manager [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Start spawning the instance on the hypervisor. {{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 639.765544] env[62740]: DEBUG nova.policy [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5e12871fbd0042c4ab07323a29c659f7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7b0f5a961cdf49f4aa10fdd6691b6681', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 639.784013] env[62740]: DEBUG nova.virt.hardware [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 639.784439] env[62740]: DEBUG nova.virt.hardware [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:348}} [ 639.784778] env[62740]: DEBUG nova.virt.hardware [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 639.785107] env[62740]: DEBUG nova.virt.hardware [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 639.785360] env[62740]: DEBUG nova.virt.hardware [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 639.785744] env[62740]: DEBUG nova.virt.hardware [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 639.786050] env[62740]: DEBUG nova.virt.hardware [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 639.786334] env[62740]: DEBUG nova.virt.hardware [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 639.786777] env[62740]: DEBUG nova.virt.hardware [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 639.786880] env[62740]: DEBUG nova.virt.hardware [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 639.787290] env[62740]: DEBUG nova.virt.hardware [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 639.788751] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7a5b063-feda-4b7a-92b1-f48f12d6d135 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.801296] env[62740]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2959ff77-47bb-4455-a29c-4af8b71ed49a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.698021] env[62740]: DEBUG nova.network.neutron [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Successfully created port: f7bd271d-dd64-4982-ae97-b8fb3cee3b91 {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 641.285488] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 641.287205] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 00784d7869ea45e7b71fd6dd6b56cdc2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 641.320016] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 00784d7869ea45e7b71fd6dd6b56cdc2 [ 641.327009] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 641.328545] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Starting heal instance info cache {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 641.328545] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Rebuilding the list of instances to heal {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 641.328545] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 48a758958e36475eaf84b59a45c5ec79 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 641.358904] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 48a758958e36475eaf84b59a45c5ec79 [ 641.361126] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 641.361282] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 641.361416] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Skipping network cache update for instance because it is Building. 
{{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 641.361543] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: fece072a-baac-4301-988c-0068d6e71cff] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 641.361665] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 641.361821] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 641.361896] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 641.362027] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 641.362160] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 641.362285] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 641.362400] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Didn't find any instances for network info cache update. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 641.362893] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 641.363044] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62740) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 641.363242] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager.update_available_resource {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 641.364463] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg b8dfc5395e8045db8e13c47c83083255 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 641.374295] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b8dfc5395e8045db8e13c47c83083255 [ 641.375301] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 641.375472] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 641.375642] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 641.375794] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62740) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 641.378618] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-707edc31-d9e9-4973-9a04-97dc585b711e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.390188] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7d1da58-160f-4a49-a6f2-939417014ce9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.407048] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffbd1d28-0a7b-4cc4-8449-ab3704502a70 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.415175] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1392d1c8-8240-41b9-a5f6-df2c0f0137ec {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.449495] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181645MB free_disk=90GB free_vcpus=48 pci_devices=None {{(pid=62740) _report_hypervisor_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 641.450041] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 641.450041] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 641.450756] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 68f0e80af4c64cb29e26f78705041444 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 641.494780] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 68f0e80af4c64cb29e26f78705041444 [ 641.499106] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg e6deca8a4a9f42c699732b2450dbc97c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 641.525905] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e6deca8a4a9f42c699732b2450dbc97c [ 641.550747] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 61b01264-eb0f-410c-8b39-971b95d16bb9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 641.550891] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 9d175573-2af2-4f66-98cd-411d10f749f0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 641.551033] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance b326be2c-43f2-4f04-9652-cec7e017288e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 641.551158] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance fece072a-baac-4301-988c-0068d6e71cff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 641.551274] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 3102cc87-df1a-4de8-bfdb-9b904f40ea2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 641.551388] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 4f0d1356-bdfb-4cb2-979a-e28f9025b311 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 641.551501] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 75050b95-60c6-4e44-a1d5-0d47492dd739 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 641.551611] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 8053d2ae-ca61-4282-aa89-83f3a2e107bc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 641.551725] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance a24df1e4-2865-4ab3-beae-0892dca12bef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 641.551923] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 4f89b342-0375-48f5-b5cf-713a8d57a182 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 641.552438] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 1a3d5a3766dd441bb08e635b6ade3b38 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 641.597049] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1a3d5a3766dd441bb08e635b6ade3b38 [ 641.597876] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 641.598558] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 0e95b87ac34743e99fcc10cd9843d057 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 641.631646] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0e95b87ac34743e99fcc10cd9843d057 [ 641.631881] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance fedb62e0-2602-4772-9e5d-00645922d2a8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 641.632319] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 65703ff39a36453fb69cd0df29b7a0b8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 641.644778] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 65703ff39a36453fb69cd0df29b7a0b8 [ 641.645528] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance f98589dc-ea7a-44c8-8cca-119d126ea0de has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 641.646216] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg e7cd2321d4cc4b358145cead3bfc6761 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 641.661581] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e7cd2321d4cc4b358145cead3bfc6761 [ 641.662643] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance fa5248d1-bddf-4244-a363-2113b0473980 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 641.662877] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 6ad1cd8d1f8e4ec78c64a5c6e97b8743 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 641.673505] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6ad1cd8d1f8e4ec78c64a5c6e97b8743 [ 641.674193] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 6ec38a6c-f4b2-42ce-b371-5fe82d577545 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 641.674663] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 8ed1195cd4af47ec8ddb710b837a6a6a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 641.688772] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8ed1195cd4af47ec8ddb710b837a6a6a [ 641.689642] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 26712c18-d9f4-4d7d-80fb-4d527da9c1e3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 641.690566] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 18dc65beab5f4dedb0d185e7acc01d32 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 641.703131] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 18dc65beab5f4dedb0d185e7acc01d32 [ 641.703131] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance ab15259f-6344-4ba0-9abd-8b0ee7df59fa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 641.703276] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 543798da3f28431ca6f3bd4a6fc91f84 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 641.717899] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 543798da3f28431ca6f3bd4a6fc91f84 [ 641.720563] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 425930c0-b9f8-4966-ae9d-0687d0a07213 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 641.720563] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 3c5a6183c6bd4505b7c359f465ba68b8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 641.736630] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c5a6183c6bd4505b7c359f465ba68b8 [ 641.737598] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance b2ec3212-25e1-4027-801d-a23309a4d0e6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 641.737919] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 4483c9c8d8fc401297f89d33f109194b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 641.751242] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4483c9c8d8fc401297f89d33f109194b [ 641.752060] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 1ae43e6d-c9ac-494d-a7a9-1f6ff538345a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 641.753121] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 659ca7d8d0f1444a90a4c2c301d7ba2e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 641.769147] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 659ca7d8d0f1444a90a4c2c301d7ba2e [ 641.769992] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance d490ad64-a2fe-4c08-b0fc-56b2e00d9c98 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 641.770593] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 95a19ae366ff43aa9dd6d46ff04767b9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 641.786379] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 95a19ae366ff43aa9dd6d46ff04767b9 [ 641.787262] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance f7a74539-8a36-450f-aec4-d059670e8f38 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 641.788178] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 31c8ecb9863440f2a50ee8dcf4fcf3ac in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 641.801262] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31c8ecb9863440f2a50ee8dcf4fcf3ac [ 641.803011] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance e473e254-387f-4581-97bc-bdeab221b10f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 641.803011] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 377b20aea76b4966a354ce3d3ba936ac in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 641.816028] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 377b20aea76b4966a354ce3d3ba936ac [ 641.816974] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 68aa9321-22ce-45a0-8323-fa8564dca46b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 641.817655] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg b73ddee2cccd491c9571cb832a67d67e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 641.835561] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b73ddee2cccd491c9571cb832a67d67e [ 641.836247] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 4ec9a397-1e4b-4767-b926-ccc6f63a951c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 641.836856] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 035315a653574136a1b5e550040d4b6a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 641.848390] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 035315a653574136a1b5e550040d4b6a [ 641.849615] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance e1c7a748-b3f3-41b7-8784-13699549a01d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 641.851474] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 1201645e03f24e8ea7e6558845d1dcbd in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 641.866890] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1201645e03f24e8ea7e6558845d1dcbd [ 641.870918] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 25cef75e-2176-4999-965b-155cd7f8d137 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 641.871200] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 641.871368] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 642.312048] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1db6845a-e4a6-46e8-850a-d2bbd62cc1a8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.320824] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0c2a268-8316-4cf7-ab0b-27b1f4f603b6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.355644] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e192b00-0ae7-4044-9878-af5f33ba484f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.363276] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28361f50-06a5-4cd4-a9d2-caca8ab0ee62 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.378872] env[62740]: DEBUG nova.compute.provider_tree [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 642.378872] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 1b31bc1f16c041bc92daad44ed56b913 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 642.387884] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1b31bc1f16c041bc92daad44ed56b913 [ 642.388846] env[62740]: DEBUG nova.scheduler.client.report [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 642.391240] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 1ad0eb4a939f4060ba2e71b3c275a61c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 642.415314] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1ad0eb4a939f4060ba2e71b3c275a61c [ 642.416131] 
env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62740) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 642.416857] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.966s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 642.519661] env[62740]: DEBUG nova.network.neutron [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Successfully updated port: f7bd271d-dd64-4982-ae97-b8fb3cee3b91 {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 642.520758] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Expecting reply to msg edb5d44d25324528b07dd7bcb0713aae in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 642.534539] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg edb5d44d25324528b07dd7bcb0713aae [ 642.536582] env[62740]: DEBUG oslo_concurrency.lockutils [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Acquiring lock "refresh_cache-4f89b342-0375-48f5-b5cf-713a8d57a182" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 642.536582] env[62740]: DEBUG oslo_concurrency.lockutils [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Acquired lock "refresh_cache-4f89b342-0375-48f5-b5cf-713a8d57a182" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.536582] env[62740]: DEBUG nova.network.neutron [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 642.536920] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Expecting reply to msg 5086095814e645288dfad70ebba6fb8e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 642.544793] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5086095814e645288dfad70ebba6fb8e [ 642.699521] env[62740]: DEBUG nova.network.neutron [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 642.945487] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 642.945487] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 642.945487] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 642.945487] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 642.945487] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 642.945794] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 643.371827] env[62740]: DEBUG nova.compute.manager [req-cfbed8f9-2be2-4031-bcd6-a94cacad2359 req-363d6058-033a-4819-9e7e-2891a2bd38c9 service nova] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Received event network-vif-plugged-f7bd271d-dd64-4982-ae97-b8fb3cee3b91 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 643.372774] env[62740]: DEBUG oslo_concurrency.lockutils [req-cfbed8f9-2be2-4031-bcd6-a94cacad2359 req-363d6058-033a-4819-9e7e-2891a2bd38c9 service nova] Acquiring lock "4f89b342-0375-48f5-b5cf-713a8d57a182-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 643.372774] env[62740]: DEBUG oslo_concurrency.lockutils [req-cfbed8f9-2be2-4031-bcd6-a94cacad2359 req-363d6058-033a-4819-9e7e-2891a2bd38c9 service nova] Lock "4f89b342-0375-48f5-b5cf-713a8d57a182-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 643.372774] env[62740]: DEBUG oslo_concurrency.lockutils [req-cfbed8f9-2be2-4031-bcd6-a94cacad2359 req-363d6058-033a-4819-9e7e-2891a2bd38c9 service nova] Lock "4f89b342-0375-48f5-b5cf-713a8d57a182-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 643.372774] env[62740]: DEBUG nova.compute.manager [req-cfbed8f9-2be2-4031-bcd6-a94cacad2359 
req-363d6058-033a-4819-9e7e-2891a2bd38c9 service nova] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] No waiting events found dispatching network-vif-plugged-f7bd271d-dd64-4982-ae97-b8fb3cee3b91 {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 643.373065] env[62740]: WARNING nova.compute.manager [req-cfbed8f9-2be2-4031-bcd6-a94cacad2359 req-363d6058-033a-4819-9e7e-2891a2bd38c9 service nova] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Received unexpected event network-vif-plugged-f7bd271d-dd64-4982-ae97-b8fb3cee3b91 for instance with vm_state building and task_state spawning. [ 643.584235] env[62740]: DEBUG nova.network.neutron [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Updating instance_info_cache with network_info: [{"id": "f7bd271d-dd64-4982-ae97-b8fb3cee3b91", "address": "fa:16:3e:b1:21:22", "network": {"id": "6b268031-9f16-4f60-881b-267c107afac5", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1012331346-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7b0f5a961cdf49f4aa10fdd6691b6681", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7bd271d-dd", "ovs_interfaceid": "f7bd271d-dd64-4982-ae97-b8fb3cee3b91", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.584235] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Expecting reply to msg 362290a65b624d65adc645eefa119663 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 643.597847] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 362290a65b624d65adc645eefa119663 [ 643.598550] env[62740]: DEBUG oslo_concurrency.lockutils [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Releasing lock "refresh_cache-4f89b342-0375-48f5-b5cf-713a8d57a182" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 643.598841] env[62740]: DEBUG nova.compute.manager [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Instance network_info: |[{"id": "f7bd271d-dd64-4982-ae97-b8fb3cee3b91", "address": "fa:16:3e:b1:21:22", "network": {"id": "6b268031-9f16-4f60-881b-267c107afac5", "bridge": "br-int", "label": 
"tempest-ServerMetadataNegativeTestJSON-1012331346-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7b0f5a961cdf49f4aa10fdd6691b6681", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7bd271d-dd", "ovs_interfaceid": "f7bd271d-dd64-4982-ae97-b8fb3cee3b91", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 643.599280] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b1:21:22', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f7bd271d-dd64-4982-ae97-b8fb3cee3b91', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 643.607497] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Creating folder: Project (7b0f5a961cdf49f4aa10fdd6691b6681). Parent ref: group-v156037. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 643.608214] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1b8f122f-4012-40ee-9073-9cc850b9544e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.619327] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Created folder: Project (7b0f5a961cdf49f4aa10fdd6691b6681) in parent group-v156037. [ 643.619462] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Creating folder: Instances. Parent ref: group-v156072. 
{{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 643.619680] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c1deed91-20ce-4460-8ffa-c3de5bdf3b70 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.633335] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Created folder: Instances in parent group-v156072. [ 643.633335] env[62740]: DEBUG oslo.service.loopingcall [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 643.633335] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 643.633335] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-31c64376-97c3-4e9d-85ba-20c0165ad36a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.654348] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 643.654348] env[62740]: value = "task-640081" [ 643.654348] env[62740]: _type = "Task" [ 643.654348] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.662720] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640081, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.167644] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640081, 'name': CreateVM_Task} progress is 99%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.670300] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640081, 'name': CreateVM_Task} progress is 99%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.169638] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640081, 'name': CreateVM_Task, 'duration_secs': 1.349729} completed successfully. 
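The CreateVM_Task records show the standard wait-for-task shape: poll the task handle on an interval, log progress while it runs (0%, then 99%, here over roughly 1.35 seconds), and report duration_secs once it completes. A compressed, runnable emulation with a toy task object, not the oslo.vmware poller itself:

import time

class FakeTask:
    """Toy stand-in for a vCenter task handle."""
    def __init__(self, task_id, ticks):
        self.id, self._ticks = task_id, ticks
    def poll(self):
        self._ticks -= 1
        if self._ticks <= 0:
            return {"state": "success", "progress": 100}
        return {"state": "running", "progress": 99 if self._ticks == 1 else 0}

def wait_for_task(task, interval=0.01):
    start = time.monotonic()
    while True:
        info = task.poll()
        if info["state"] == "success":
            print(f"Task {task.id} completed successfully "
                  f"(duration_secs: {time.monotonic() - start:.6f})")
            return
        print(f"Task: {{'id': '{task.id}'}} progress is {info['progress']}%.")
        time.sleep(interval)

wait_for_task(FakeTask("task-640081", ticks=3))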
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.169830] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 645.171070] env[62740]: DEBUG oslo_vmware.service [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-764bd739-712a-45b5-80d5-2a10901119ca {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.178042] env[62740]: DEBUG oslo_concurrency.lockutils [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 645.178042] env[62740]: DEBUG oslo_concurrency.lockutils [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Acquired lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.178430] env[62740]: DEBUG oslo_concurrency.lockutils [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 645.178735] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0cf27f2c-0008-4031-9e12-167513f0a624 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.184593] env[62740]: DEBUG oslo_vmware.api [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Waiting for the task: (returnval){ [ 645.184593] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52d5418e-d373-aa03-8c40-4c214afab4e8" [ 645.184593] env[62740]: _type = "Task" [ 645.184593] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.194022] env[62740]: DEBUG oslo_vmware.api [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52d5418e-d373-aa03-8c40-4c214afab4e8, 'name': SearchDatastore_Task} progress is 0%. 
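Datastore locations in these records consistently use the "[datastore] relative/path" convention, e.g. [datastore1] devstack-image-cache_base/<image id>/<image id>.vmdk. A small formatter/parser for that convention (helper names are invented for illustration):

def make_ds_path(datastore, *parts):
    return f"[{datastore}] " + "/".join(parts)

def split_ds_path(ds_path):
    bracket, _, rel = ds_path.partition("] ")
    return bracket.lstrip("["), rel

image_id = "174f7655-3fb8-458a-8e9c-108936afe738"
cached = make_ds_path("datastore1", "devstack-image-cache_base",
                      image_id, image_id + ".vmdk")
print(cached)                 # "[datastore1] devstack-image-cache_base/<id>/<id>.vmdk"
print(split_ds_path(cached))  # ("datastore1", "devstack-image-cache_base/<id>/<id>.vmdk")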
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.695101] env[62740]: DEBUG oslo_concurrency.lockutils [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Releasing lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 645.695399] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 645.695588] env[62740]: DEBUG oslo_concurrency.lockutils [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 645.696388] env[62740]: DEBUG oslo_concurrency.lockutils [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Acquired lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.696388] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 645.696388] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-14e98c08-973a-4f91-9acd-7628b6d0edbf {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.704678] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 645.704872] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Folder [datastore1] devstack-image-cache_base created. 
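The records above are the cache-miss half of a fetch-if-missing flow: the per-image lock is held, the cached vmdk path is probed via SearchDatastore_Task, and on a miss a vmware_temp/<random uuid> directory is prepared so the image can be downloaded and then promoted into devstack-image-cache_base. Sketched in plain Python under those assumptions, with a dict-backed cache and a stubbed downloader:

import uuid

image_cache = {}  # image id -> datastore path of the cached vmdk

def fetch_image_if_missing(image_id, download):
    if image_id in image_cache:
        print(f"Processing image {image_id}: cache hit")
        return image_cache[image_id]
    tmp_dir = f"vmware_temp/{uuid.uuid4()}/{image_id}"
    print(f"Fetch image to [datastore1] {tmp_dir}/tmp-sparse.vmdk")
    download(f"{tmp_dir}/tmp-sparse.vmdk")
    cached = f"[datastore1] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
    image_cache[image_id] = cached   # promote the download into the cache
    return cached

print(fetch_image_if_missing("174f7655-3fb8-458a-8e9c-108936afe738",
                             download=lambda dst: print(f"downloading to {dst}")))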
{{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 645.705672] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91e7d070-cc2e-47dc-b255-05f71c10610e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.711802] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b8f95e4-a909-4d53-9543-f3322d3b7957 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.720192] env[62740]: DEBUG oslo_vmware.api [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Waiting for the task: (returnval){ [ 645.720192] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52003ef8-0f50-6f65-9112-1b0b53f70c95" [ 645.720192] env[62740]: _type = "Task" [ 645.720192] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.727014] env[62740]: DEBUG oslo_vmware.api [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52003ef8-0f50-6f65-9112-1b0b53f70c95, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.154917] env[62740]: DEBUG nova.compute.manager [req-b0e2001c-3e07-4518-801c-04aaf7e647d7 req-0af4f82c-4a12-45c0-9fb7-becbca96239b service nova] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Received event network-changed-f7bd271d-dd64-4982-ae97-b8fb3cee3b91 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 646.155316] env[62740]: DEBUG nova.compute.manager [req-b0e2001c-3e07-4518-801c-04aaf7e647d7 req-0af4f82c-4a12-45c0-9fb7-becbca96239b service nova] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Refreshing instance network info cache due to event network-changed-f7bd271d-dd64-4982-ae97-b8fb3cee3b91. 
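This network-changed event takes the same dispatch path as the earlier network-vif-plugged one: look for a waiter registered for the (instance, event) pair and hand it the event, otherwise log the "unexpected event" warning seen above. A dictionary-based emulation of that pop-or-warn dispatch:

# (instance_uuid, event_name) -> callback for a prepared waiter.
waiters = {}

def external_instance_event(instance_uuid, event_name):
    callback = waiters.pop((instance_uuid, event_name), None)
    if callback is None:
        print(f"WARNING: Received unexpected event {event_name} "
              f"for instance {instance_uuid}")
        return
    callback()

instance = "4f89b342-0375-48f5-b5cf-713a8d57a182"
# No waiter registered yet, so this mirrors the warning in the log:
external_instance_event(instance, "network-vif-plugged-f7bd271d")
waiters[(instance, "network-changed-f7bd271d")] = (
    lambda: print("refreshing network info cache"))
external_instance_event(instance, "network-changed-f7bd271d")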
{{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 646.155571] env[62740]: DEBUG oslo_concurrency.lockutils [req-b0e2001c-3e07-4518-801c-04aaf7e647d7 req-0af4f82c-4a12-45c0-9fb7-becbca96239b service nova] Acquiring lock "refresh_cache-4f89b342-0375-48f5-b5cf-713a8d57a182" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 646.156947] env[62740]: DEBUG oslo_concurrency.lockutils [req-b0e2001c-3e07-4518-801c-04aaf7e647d7 req-0af4f82c-4a12-45c0-9fb7-becbca96239b service nova] Acquired lock "refresh_cache-4f89b342-0375-48f5-b5cf-713a8d57a182" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 646.156947] env[62740]: DEBUG nova.network.neutron [req-b0e2001c-3e07-4518-801c-04aaf7e647d7 req-0af4f82c-4a12-45c0-9fb7-becbca96239b service nova] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Refreshing network info cache for port f7bd271d-dd64-4982-ae97-b8fb3cee3b91 {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 646.156947] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-b0e2001c-3e07-4518-801c-04aaf7e647d7 req-0af4f82c-4a12-45c0-9fb7-becbca96239b service nova] Expecting reply to msg 1f6cbad9b8fc4f8fb4b505cfe77c848e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 646.167773] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1f6cbad9b8fc4f8fb4b505cfe77c848e [ 646.237084] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 646.237434] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Creating directory with path [datastore1] vmware_temp/9fa89c00-b565-4459-bacd-e9e13746e171/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 646.239852] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-609aa672-2405-4471-b496-b30932b34a62 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.270911] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Created directory with path [datastore1] vmware_temp/9fa89c00-b565-4459-bacd-e9e13746e171/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 646.270911] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Fetch image to [datastore1] vmware_temp/9fa89c00-b565-4459-bacd-e9e13746e171/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 646.270911] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None 
req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore1] vmware_temp/9fa89c00-b565-4459-bacd-e9e13746e171/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore1 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 646.270911] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1282fa6c-9e1b-4718-8d23-4c3a4e3c49da {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.281398] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-549570fc-dce7-4490-b094-4fec07c16d28 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.294093] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cc75548-5da3-4600-b062-9c84faa7c765 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.327471] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de99cde7-9459-4173-a15a-33b9e12dbddf {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.336537] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-49b111bd-2286-4721-ade3-8e2478944d50 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.361344] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore1 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 646.440397] env[62740]: DEBUG oslo_vmware.rw_handles [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9fa89c00-b565-4459-bacd-e9e13746e171/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 646.523058] env[62740]: DEBUG oslo_vmware.rw_handles [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Completed reading data from the image iterator. 
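The rw_handles records describe a write handle: an HTTP connection opened against the datastore folder URL with a fixed size (21318656 bytes here), fed chunk by chunk from the image iterator, then closed. A file-backed stand-in that keeps the same write/close shape; in the real flow the BytesIO sink would be an HTTPS PUT with that size as the Content-Length:

import io

class WriteHandle:
    """File-backed stand-in for an rw_handles-style write connection."""
    def __init__(self, sink, size):
        self._sink = sink
        self._remaining = size  # plays the role of Content-Length
    def write(self, chunk):
        if len(chunk) > self._remaining:
            raise IOError("more data than the declared file size")
        self._sink.write(chunk)
        self._remaining -= len(chunk)
    def close(self):
        print(f"Closing write handle ({self._remaining} bytes undelivered)")

image_iterator = iter([b"\x00" * 1024] * 4)  # stand-in for the image data
handle = WriteHandle(io.BytesIO(), size=4 * 1024)
for chunk in image_iterator:
    handle.write(chunk)
print("Completed reading data from the image iterator.")
handle.close()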
{{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 646.523254] env[62740]: DEBUG oslo_vmware.rw_handles [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9fa89c00-b565-4459-bacd-e9e13746e171/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 646.829102] env[62740]: DEBUG nova.network.neutron [req-b0e2001c-3e07-4518-801c-04aaf7e647d7 req-0af4f82c-4a12-45c0-9fb7-becbca96239b service nova] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Updated VIF entry in instance network info cache for port f7bd271d-dd64-4982-ae97-b8fb3cee3b91. {{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 646.831111] env[62740]: DEBUG nova.network.neutron [req-b0e2001c-3e07-4518-801c-04aaf7e647d7 req-0af4f82c-4a12-45c0-9fb7-becbca96239b service nova] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Updating instance_info_cache with network_info: [{"id": "f7bd271d-dd64-4982-ae97-b8fb3cee3b91", "address": "fa:16:3e:b1:21:22", "network": {"id": "6b268031-9f16-4f60-881b-267c107afac5", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1012331346-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7b0f5a961cdf49f4aa10fdd6691b6681", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7bd271d-dd", "ovs_interfaceid": "f7bd271d-dd64-4982-ae97-b8fb3cee3b91", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 646.831111] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-b0e2001c-3e07-4518-801c-04aaf7e647d7 req-0af4f82c-4a12-45c0-9fb7-becbca96239b service nova] Expecting reply to msg eb037c25ebfd428fa269f6263ad9f5be in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 646.841364] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eb037c25ebfd428fa269f6263ad9f5be [ 646.842333] env[62740]: DEBUG oslo_concurrency.lockutils [req-b0e2001c-3e07-4518-801c-04aaf7e647d7 req-0af4f82c-4a12-45c0-9fb7-becbca96239b service nova] Releasing lock "refresh_cache-4f89b342-0375-48f5-b5cf-713a8d57a182" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 647.365891] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Acquiring lock 
"0e5caaed-20ff-40bd-b0cf-016ac18642cf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 647.366254] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Lock "0e5caaed-20ff-40bd-b0cf-016ac18642cf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.139609] env[62740]: DEBUG oslo_concurrency.lockutils [None req-4951cca0-017f-4011-8501-69ae0db3c99a tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Acquiring lock "b9840eca-ec5f-4a8c-9bdf-1212e2640e5c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.140182] env[62740]: DEBUG oslo_concurrency.lockutils [None req-4951cca0-017f-4011-8501-69ae0db3c99a tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Lock "b9840eca-ec5f-4a8c-9bdf-1212e2640e5c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.176399] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 52d5e46803954e1bbd212f48070bc597 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 650.192550] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 52d5e46803954e1bbd212f48070bc597 [ 651.491804] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2b9d9caf-5b29-49ed-8567-a5e8da717dad tempest-ServerActionsTestOtherA-296534188 tempest-ServerActionsTestOtherA-296534188-project-member] Acquiring lock "ec703551-5c8d-43bb-b727-709aeeeac9a4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 651.492154] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2b9d9caf-5b29-49ed-8567-a5e8da717dad tempest-ServerActionsTestOtherA-296534188 tempest-ServerActionsTestOtherA-296534188-project-member] Lock "ec703551-5c8d-43bb-b727-709aeeeac9a4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 657.174879] env[62740]: DEBUG oslo_concurrency.lockutils [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Acquiring lock "00085df9-ce61-4ccc-8ecf-16956109eb8f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 657.175169] env[62740]: DEBUG oslo_concurrency.lockutils [None req-09317351-a109-4643-a772-41846a599304 
tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Lock "00085df9-ce61-4ccc-8ecf-16956109eb8f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 664.542358] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5796bde4-492e-4f25-ac8c-fcab9c3edbaf tempest-ServerDiagnosticsV248Test-1188391784 tempest-ServerDiagnosticsV248Test-1188391784-project-member] Acquiring lock "e5b0daa1-6745-48ad-8e69-6c7362bac085" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 664.542644] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5796bde4-492e-4f25-ac8c-fcab9c3edbaf tempest-ServerDiagnosticsV248Test-1188391784 tempest-ServerDiagnosticsV248Test-1188391784-project-member] Lock "e5b0daa1-6745-48ad-8e69-6c7362bac085" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 670.965732] env[62740]: DEBUG oslo_concurrency.lockutils [None req-d4d90b3a-0418-4a35-a0e0-36e4fae38c8a tempest-ServersTestJSON-1664078512 tempest-ServersTestJSON-1664078512-project-member] Acquiring lock "e8bb18d5-207c-48fb-b7e5-06b72ce61b4f" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 670.966035] env[62740]: DEBUG oslo_concurrency.lockutils [None req-d4d90b3a-0418-4a35-a0e0-36e4fae38c8a tempest-ServersTestJSON-1664078512 tempest-ServersTestJSON-1664078512-project-member] Lock "e8bb18d5-207c-48fb-b7e5-06b72ce61b4f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 686.290997] env[62740]: WARNING oslo_vmware.rw_handles [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 686.290997] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 686.290997] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 686.290997] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 686.290997] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 686.290997] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 686.290997] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 686.290997] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 686.290997] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 686.290997] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end
closed connection without" [ 686.290997] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 686.290997] env[62740]: ERROR oslo_vmware.rw_handles [ 686.291585] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/861adc22-db47-479c-835a-476fff7ffcfb/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 686.293293] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 686.293575] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Copying Virtual Disk [datastore2] vmware_temp/861adc22-db47-479c-835a-476fff7ffcfb/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore2] vmware_temp/861adc22-db47-479c-835a-476fff7ffcfb/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 686.293890] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4f9654c1-9118-49e1-9352-b2eab1ee83a1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.301509] env[62740]: DEBUG oslo_vmware.api [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Waiting for the task: (returnval){ [ 686.301509] env[62740]: value = "task-640082" [ 686.301509] env[62740]: _type = "Task" [ 686.301509] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.309139] env[62740]: DEBUG oslo_vmware.api [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Task: {'id': task-640082, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.812374] env[62740]: DEBUG oslo_vmware.exceptions [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Fault InvalidArgument not matched. 
{{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 686.812822] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 686.813489] env[62740]: ERROR nova.compute.manager [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 686.813489] env[62740]: Faults: ['InvalidArgument'] [ 686.813489] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Traceback (most recent call last): [ 686.813489] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 686.813489] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] yield resources [ 686.813489] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 686.813489] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] self.driver.spawn(context, instance, image_meta, [ 686.813489] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 686.813489] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 686.813489] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 686.813489] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] self._fetch_image_if_missing(context, vi) [ 686.813489] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 686.813816] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] image_cache(vi, tmp_image_ds_loc) [ 686.813816] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 686.813816] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] vm_util.copy_virtual_disk( [ 686.813816] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 686.813816] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] session._wait_for_task(vmdk_copy_task) [ 686.813816] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 
157, in _wait_for_task [ 686.813816] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] return self.wait_for_task(task_ref) [ 686.813816] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 686.813816] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] return evt.wait() [ 686.813816] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 686.813816] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] result = hub.switch() [ 686.813816] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 686.813816] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] return self.greenlet.switch() [ 686.814187] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 686.814187] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] self.f(*self.args, **self.kw) [ 686.814187] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 686.814187] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] raise exceptions.translate_fault(task_info.error) [ 686.814187] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 686.814187] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Faults: ['InvalidArgument'] [ 686.814187] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] [ 686.814187] env[62740]: INFO nova.compute.manager [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Terminating instance [ 686.815642] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.815855] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 686.816170] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5111bdef-bad2-4207-9f43-2d3923fb7f3d {{(pid=62740) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.818739] env[62740]: DEBUG nova.compute.manager [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 686.818937] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 686.819700] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0df69f9f-1710-407e-b672-cee75d6ceeb1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.826645] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 686.826868] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-028d8950-e701-4c0c-846b-71e881f91697 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.829126] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 686.829304] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 686.830278] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fed0134b-d11a-4767-985d-e71c91044256 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.835007] env[62740]: DEBUG oslo_vmware.api [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Waiting for the task: (returnval){ [ 686.835007] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]522c2d52-d707-0c98-85dc-3b701a28e150" [ 686.835007] env[62740]: _type = "Task" [ 686.835007] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.842186] env[62740]: DEBUG oslo_vmware.api [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]522c2d52-d707-0c98-85dc-3b701a28e150, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.894874] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 686.895138] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 686.895325] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Deleting the datastore file [datastore2] 61b01264-eb0f-410c-8b39-971b95d16bb9 {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 686.895589] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0d9467f3-d6a7-4128-b175-f392c8e465e9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.903071] env[62740]: DEBUG oslo_vmware.api [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Waiting for the task: (returnval){ [ 686.903071] env[62740]: value = "task-640084" [ 686.903071] env[62740]: _type = "Task" [ 686.903071] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.911739] env[62740]: DEBUG oslo_vmware.api [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Task: {'id': task-640084, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.345614] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 687.345865] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Creating directory with path [datastore2] vmware_temp/29703b3d-5009-405a-b4e8-274fcdf36d31/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 687.346123] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-879c7165-71f9-438e-adc7-8e4ec5bce1a6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.358365] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Created directory with path [datastore2] vmware_temp/29703b3d-5009-405a-b4e8-274fcdf36d31/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 687.358560] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Fetch image to [datastore2] vmware_temp/29703b3d-5009-405a-b4e8-274fcdf36d31/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 687.358834] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/29703b3d-5009-405a-b4e8-274fcdf36d31/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 687.359517] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ff2c4e8-32dc-46b2-af74-5d6d3a19b5a0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.366159] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-014bf16f-7afb-464d-ba8a-23602d8df00b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.375009] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce8be6a6-727b-49ce-9402-7d884f65c6ca {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.408231] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2c43caf1-7b24-4625-aa0c-184ddedd7a25 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.415058] env[62740]: DEBUG oslo_vmware.api [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Task: {'id': task-640084, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.097715} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.416553] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 687.416745] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 687.416919] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 687.417112] env[62740]: INFO nova.compute.manager [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 687.418882] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-0ca6c994-998a-4b9c-9df9-c1bbe1b4610c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.420844] env[62740]: DEBUG nova.compute.claims [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 687.421028] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 687.421247] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 687.423131] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Expecting reply to msg 79441aa3326c43599c64fe6330136cfd in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 687.443180] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 687.466066] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79441aa3326c43599c64fe6330136cfd [ 687.499195] env[62740]: DEBUG oslo_vmware.rw_handles [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/29703b3d-5009-405a-b4e8-274fcdf36d31/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 687.562340] env[62740]: DEBUG oslo_vmware.rw_handles [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Completed reading data from the image iterator. 
{{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 687.562561] env[62740]: DEBUG oslo_vmware.rw_handles [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/29703b3d-5009-405a-b4e8-274fcdf36d31/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 687.904472] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b682ff8b-53b5-461a-977b-8f305a7f6c3a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.911455] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6174347f-4e35-4743-b1cb-817bbbe62d10 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.940484] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32ace9f7-6295-4b50-ac5b-d827bfb73cdd {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.947187] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e04d580d-8adf-4dd2-9d90-b14a728cb1ed {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.959908] env[62740]: DEBUG nova.compute.provider_tree [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 687.960429] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Expecting reply to msg c7ed671064de4f06958d777c7cc79f4b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 687.967939] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c7ed671064de4f06958d777c7cc79f4b [ 687.968871] env[62740]: DEBUG nova.scheduler.client.report [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 687.971259] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Expecting reply to msg 
086da71a6e99483f9e3b1440f729b804 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 687.982191] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 086da71a6e99483f9e3b1440f729b804 [ 687.982922] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.562s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 687.983451] env[62740]: ERROR nova.compute.manager [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 687.983451] env[62740]: Faults: ['InvalidArgument'] [ 687.983451] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Traceback (most recent call last): [ 687.983451] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 687.983451] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] self.driver.spawn(context, instance, image_meta, [ 687.983451] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 687.983451] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 687.983451] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 687.983451] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] self._fetch_image_if_missing(context, vi) [ 687.983451] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 687.983451] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] image_cache(vi, tmp_image_ds_loc) [ 687.983451] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 687.983796] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] vm_util.copy_virtual_disk( [ 687.983796] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 687.983796] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] session._wait_for_task(vmdk_copy_task) [ 687.983796] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 687.983796] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] return self.wait_for_task(task_ref) [ 687.983796] env[62740]: ERROR 
nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 687.983796] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] return evt.wait() [ 687.983796] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 687.983796] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] result = hub.switch() [ 687.983796] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 687.983796] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] return self.greenlet.switch() [ 687.983796] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 687.983796] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] self.f(*self.args, **self.kw) [ 687.984251] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 687.984251] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] raise exceptions.translate_fault(task_info.error) [ 687.984251] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 687.984251] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Faults: ['InvalidArgument'] [ 687.984251] env[62740]: ERROR nova.compute.manager [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] [ 687.984251] env[62740]: DEBUG nova.compute.utils [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 687.985525] env[62740]: DEBUG nova.compute.manager [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Build of instance 61b01264-eb0f-410c-8b39-971b95d16bb9 was re-scheduled: A specified parameter was not correct: fileType [ 687.985525] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 687.985901] env[62740]: DEBUG nova.compute.manager [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 687.986104] env[62740]: DEBUG nova.compute.manager [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Virt driver does not provide unplug_vifs 
method, so it is not possible to determine if VIFs should be unplugged. {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 687.986283] env[62740]: DEBUG nova.compute.manager [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 687.986462] env[62740]: DEBUG nova.network.neutron [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 688.489164] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Expecting reply to msg f321f3a7c5144a9e81654c7e91844d7d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 688.501569] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f321f3a7c5144a9e81654c7e91844d7d [ 688.502203] env[62740]: DEBUG nova.network.neutron [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.502680] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Expecting reply to msg ab9a482b29164c1686c423c6d7a95e47 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 688.514333] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ab9a482b29164c1686c423c6d7a95e47 [ 688.514941] env[62740]: INFO nova.compute.manager [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] Took 0.53 seconds to deallocate network for instance.
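Note on the failed build above: the traceback ends in oslo_vmware.exceptions.VimFaultException, whose fault_list attribute carries the raw VIM fault names ('InvalidArgument' here). A hedged sketch of how a caller can classify such a failure; the function name and return convention are illustrative, not Nova's actual handling beyond what the log records (claim aborted, build rescheduled).

    from oslo_vmware import exceptions as vexc

    def wait_and_classify(session, task):
        """Wait on a VMware task; surface the VIM fault names on failure."""
        try:
            session.wait_for_task(task)
            return True
        except vexc.VimFaultException as e:
            # e.fault_list holds the raw fault names, e.g. ['InvalidArgument']
            # for "A specified parameter was not correct: fileType" above.
            if 'InvalidArgument' in e.fault_list:
                raise  # treated as fatal for this build attempt
            return False  # illustrative: other faults left to the caller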
[ 688.516619] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Expecting reply to msg 4d957b6f8cc04095b006b7be0d1ce5f8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 688.556189] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4d957b6f8cc04095b006b7be0d1ce5f8 [ 688.558855] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Expecting reply to msg 5338bc3f95184b6fb6f452d4d19963ef in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 688.598898] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5338bc3f95184b6fb6f452d4d19963ef [ 688.628328] env[62740]: INFO nova.scheduler.client.report [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Deleted allocations for instance 61b01264-eb0f-410c-8b39-971b95d16bb9 [ 688.634661] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Expecting reply to msg 9919bad5b1ea4ac8a5257639161f787a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 688.650856] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9919bad5b1ea4ac8a5257639161f787a [ 688.650996] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f4db0e51-01a3-428d-a9b5-3afbedc13f29 tempest-ServerExternalEventsTest-886346695 tempest-ServerExternalEventsTest-886346695-project-member] Lock "61b01264-eb0f-410c-8b39-971b95d16bb9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 109.999s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 688.651569] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg 9c8153a751064d1381d90e4f4635368b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 688.661023] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "61b01264-eb0f-410c-8b39-971b95d16bb9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 106.424s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 688.661023] env[62740]: INFO nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 61b01264-eb0f-410c-8b39-971b95d16bb9] During sync_power_state the instance has a pending task (spawning). Skip. 
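Note on the lock entries: the "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" triplets emitted from lockutils.py:402-421 come from oslo.concurrency's synchronized wrapper. A minimal sketch with a hypothetical function name:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_claims(instance_uuid):
        # The decorator's inner() wrapper logs the acquire/wait/release
        # DEBUG lines seen throughout this log while the body runs
        # under the named lock.
        pass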
[ 688.661023] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "61b01264-eb0f-410c-8b39-971b95d16bb9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 688.664414] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9c8153a751064d1381d90e4f4635368b [ 688.664790] env[62740]: DEBUG nova.compute.manager [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 688.666433] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg eeb4a57f3ad84f9caae8ce21fba0fc67 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 688.702166] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eeb4a57f3ad84f9caae8ce21fba0fc67 [ 688.720077] env[62740]: DEBUG oslo_concurrency.lockutils [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 688.720358] env[62740]: DEBUG oslo_concurrency.lockutils [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 688.723328] env[62740]: INFO nova.compute.claims [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 688.725221] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg 0bd7416b623b4aaab9c139a97930d7b0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 688.761829] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0bd7416b623b4aaab9c139a97930d7b0 [ 688.762902] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg 74c73247442c4befbfee08c6fa9c7e4b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 688.772614] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 74c73247442c4befbfee08c6fa9c7e4b [ 689.173832] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2a7ab47a-689a-414d-9e67-8420826f1307 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.181617] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-001c41cf-f177-4936-baab-84552aefab2b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.210839] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14e4fee3-45b2-47b9-bda4-18f04df459c3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.217958] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7130ea0-e759-44d4-8448-7b0c294dbcec {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.230989] env[62740]: DEBUG nova.compute.provider_tree [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 689.231498] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg db9a6e2987c046da8d9e4e360fb5baad in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 689.238415] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg db9a6e2987c046da8d9e4e360fb5baad [ 689.239310] env[62740]: DEBUG nova.scheduler.client.report [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 689.241642] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg a3275c05131543118e6650293e1938d3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 689.255990] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a3275c05131543118e6650293e1938d3 [ 689.256903] env[62740]: DEBUG oslo_concurrency.lockutils [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.536s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 689.257244] env[62740]: DEBUG nova.compute.manager [None 
req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Start building networks asynchronously for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 689.258918] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg 23c9ad69d24c4193bea8ab8da1f18355 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 689.297441] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 23c9ad69d24c4193bea8ab8da1f18355 [ 689.298752] env[62740]: DEBUG nova.compute.utils [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 689.299365] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg c9dbbdcaf41741b1bb7d02defb7aa8b4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 689.300160] env[62740]: DEBUG nova.compute.manager [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 689.300330] env[62740]: DEBUG nova.network.neutron [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 689.308250] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c9dbbdcaf41741b1bb7d02defb7aa8b4 [ 689.308747] env[62740]: DEBUG nova.compute.manager [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 689.310429] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg 9cd104076d804b4a810aea304713a461 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 689.338617] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9cd104076d804b4a810aea304713a461 [ 689.341471] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg b40b9a73d5d244d5bacc7061fc075334 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 689.371781] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b40b9a73d5d244d5bacc7061fc075334 [ 689.372981] env[62740]: DEBUG nova.compute.manager [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Start spawning the instance on the hypervisor. {{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 689.387174] env[62740]: DEBUG nova.policy [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '865d4b967faf4e28bcb9eca45175e5d8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f91cddc4ce184025b45b526c8bc56a5b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 689.399937] env[62740]: DEBUG nova.virt.hardware [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 689.400180] env[62740]: DEBUG nova.virt.hardware [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 689.400341] env[62740]: DEBUG 
nova.virt.hardware [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 689.400522] env[62740]: DEBUG nova.virt.hardware [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 689.400666] env[62740]: DEBUG nova.virt.hardware [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 689.400812] env[62740]: DEBUG nova.virt.hardware [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 689.401021] env[62740]: DEBUG nova.virt.hardware [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 689.401185] env[62740]: DEBUG nova.virt.hardware [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 689.401349] env[62740]: DEBUG nova.virt.hardware [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 689.401509] env[62740]: DEBUG nova.virt.hardware [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 689.401677] env[62740]: DEBUG nova.virt.hardware [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 689.402545] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a2814e3-b56f-4a81-8ff2-c38344b4ee11 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.410423] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e32f5c6-f0e0-409a-a31f-001e18fb0724 {{(pid=62740) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.921319] env[62740]: DEBUG nova.network.neutron [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Successfully created port: 586520a6-5d6b-491c-9b64-1d8742a59c83 {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 690.845113] env[62740]: DEBUG nova.network.neutron [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Successfully updated port: 586520a6-5d6b-491c-9b64-1d8742a59c83 {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 690.845113] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg d38c38bcb6144e65815951e5057ce91f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 690.855408] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d38c38bcb6144e65815951e5057ce91f [ 690.858325] env[62740]: DEBUG oslo_concurrency.lockutils [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Acquiring lock "refresh_cache-149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 690.858325] env[62740]: DEBUG oslo_concurrency.lockutils [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Acquired lock "refresh_cache-149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 690.858325] env[62740]: DEBUG nova.network.neutron [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 690.858325] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg d1f0daf54d744faeb0884b218bfdbcf0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 690.871841] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d1f0daf54d744faeb0884b218bfdbcf0 [ 690.957686] env[62740]: DEBUG nova.network.neutron [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 690.979586] env[62740]: DEBUG nova.compute.manager [req-e701c116-127e-41de-8b38-82b8808fdde4 req-b6479a13-ed8e-4163-bbd8-f53d692dcf5a service nova] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Received event network-vif-plugged-586520a6-5d6b-491c-9b64-1d8742a59c83 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 690.979869] env[62740]: DEBUG oslo_concurrency.lockutils [req-e701c116-127e-41de-8b38-82b8808fdde4 req-b6479a13-ed8e-4163-bbd8-f53d692dcf5a service nova] Acquiring lock "149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 690.980348] env[62740]: DEBUG oslo_concurrency.lockutils [req-e701c116-127e-41de-8b38-82b8808fdde4 req-b6479a13-ed8e-4163-bbd8-f53d692dcf5a service nova] Lock "149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 690.980348] env[62740]: DEBUG oslo_concurrency.lockutils [req-e701c116-127e-41de-8b38-82b8808fdde4 req-b6479a13-ed8e-4163-bbd8-f53d692dcf5a service nova] Lock "149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 690.980625] env[62740]: DEBUG nova.compute.manager [req-e701c116-127e-41de-8b38-82b8808fdde4 req-b6479a13-ed8e-4163-bbd8-f53d692dcf5a service nova] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] No waiting events found dispatching network-vif-plugged-586520a6-5d6b-491c-9b64-1d8742a59c83 {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 690.980898] env[62740]: WARNING nova.compute.manager [req-e701c116-127e-41de-8b38-82b8808fdde4 req-b6479a13-ed8e-4163-bbd8-f53d692dcf5a service nova] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Received unexpected event network-vif-plugged-586520a6-5d6b-491c-9b64-1d8742a59c83 for instance with vm_state building and task_state spawning. 
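Note on the network-vif-plugged event handled above: it reaches nova-compute via Nova's os-server-external-events API, which Neutron calls when a port becomes active; the WARNING fires because no waiter was registered while the instance was still spawning. A hedged sketch of the request shape; the endpoint and token are placeholders, while the server and port UUIDs are taken from the entries above.

    import requests

    payload = {
        "events": [{
            "name": "network-vif-plugged",
            "server_uuid": "149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd",
            "tag": "586520a6-5d6b-491c-9b64-1d8742a59c83",  # the Neutron port id
            "status": "completed",
        }]
    }
    # Placeholder endpoint and token; in a real deployment Neutron's
    # nova notifier makes this call with a service-scoped token.
    requests.post(
        "http://nova-api.example.org/v2.1/os-server-external-events",
        json=payload,
        headers={"X-Auth-Token": "<service token>"},
    )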
[ 691.197896] env[62740]: DEBUG nova.network.neutron [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Updating instance_info_cache with network_info: [{"id": "586520a6-5d6b-491c-9b64-1d8742a59c83", "address": "fa:16:3e:dd:b8:4c", "network": {"id": "304597cd-4bd9-403c-8f5b-990e2a1efabc", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "47f0062f3bf04910bbbb3502a2f3ff28", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap586520a6-5d", "ovs_interfaceid": "586520a6-5d6b-491c-9b64-1d8742a59c83", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 691.198246] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg c01873a116284515a8e99fa08fc2d6b7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 691.211708] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c01873a116284515a8e99fa08fc2d6b7 [ 691.212337] env[62740]: DEBUG oslo_concurrency.lockutils [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Releasing lock "refresh_cache-149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 691.212616] env[62740]: DEBUG nova.compute.manager [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Instance network_info: |[{"id": "586520a6-5d6b-491c-9b64-1d8742a59c83", "address": "fa:16:3e:dd:b8:4c", "network": {"id": "304597cd-4bd9-403c-8f5b-990e2a1efabc", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "47f0062f3bf04910bbbb3502a2f3ff28", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap586520a6-5d", "ovs_interfaceid": "586520a6-5d6b-491c-9b64-1d8742a59c83", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 691.213012] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dd:b8:4c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '39ab9baf-90cd-4fe2-8d56-434f8210fc19', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '586520a6-5d6b-491c-9b64-1d8742a59c83', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 691.221018] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Creating folder: Project (f91cddc4ce184025b45b526c8bc56a5b). Parent ref: group-v156037. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 691.221540] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0ec6674c-5a5b-47d2-8bc1-10291012822d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.233888] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Created folder: Project (f91cddc4ce184025b45b526c8bc56a5b) in parent group-v156037. [ 691.234091] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Creating folder: Instances. Parent ref: group-v156075. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 691.234311] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0fa6c132-1725-4501-be9b-9762a89e34da {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.243116] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Created folder: Instances in parent group-v156075. [ 691.243340] env[62740]: DEBUG oslo.service.loopingcall [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 691.243519] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 691.243702] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dd701c07-d8e2-494c-b86e-eab8a41ce29d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.263601] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 691.263601] env[62740]: value = "task-640087" [ 691.263601] env[62740]: _type = "Task" [ 691.263601] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.270524] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640087, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.773948] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640087, 'name': CreateVM_Task, 'duration_secs': 0.298192} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.774299] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 691.775096] env[62740]: DEBUG oslo_concurrency.lockutils [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 691.775388] env[62740]: DEBUG oslo_concurrency.lockutils [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Acquired lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.775786] env[62740]: DEBUG oslo_concurrency.lockutils [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 691.776160] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62ec4ce8-d0f1-426c-b1b6-4bd0869eafda {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.780959] env[62740]: DEBUG oslo_vmware.api [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Waiting for the task: (returnval){ [ 691.780959] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52d78f5b-213f-5163-01bd-2e5661064a58" [ 691.780959] env[62740]: _type = "Task" [ 691.780959] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.790476] env[62740]: DEBUG oslo_vmware.api [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52d78f5b-213f-5163-01bd-2e5661064a58, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.292434] env[62740]: DEBUG oslo_concurrency.lockutils [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Releasing lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 692.292796] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 692.293029] env[62740]: DEBUG oslo_concurrency.lockutils [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 693.191073] env[62740]: DEBUG nova.compute.manager [req-e897cf5c-ee00-428e-a39d-8e532a5bd486 req-a0254b49-a539-40bd-ab93-b18c3c930b52 service nova] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Received event network-changed-586520a6-5d6b-491c-9b64-1d8742a59c83 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 693.191302] env[62740]: DEBUG nova.compute.manager [req-e897cf5c-ee00-428e-a39d-8e532a5bd486 req-a0254b49-a539-40bd-ab93-b18c3c930b52 service nova] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Refreshing instance network info cache due to event network-changed-586520a6-5d6b-491c-9b64-1d8742a59c83. 
{{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 693.191534] env[62740]: DEBUG oslo_concurrency.lockutils [req-e897cf5c-ee00-428e-a39d-8e532a5bd486 req-a0254b49-a539-40bd-ab93-b18c3c930b52 service nova] Acquiring lock "refresh_cache-149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 693.191693] env[62740]: DEBUG oslo_concurrency.lockutils [req-e897cf5c-ee00-428e-a39d-8e532a5bd486 req-a0254b49-a539-40bd-ab93-b18c3c930b52 service nova] Acquired lock "refresh_cache-149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.191863] env[62740]: DEBUG nova.network.neutron [req-e897cf5c-ee00-428e-a39d-8e532a5bd486 req-a0254b49-a539-40bd-ab93-b18c3c930b52 service nova] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Refreshing network info cache for port 586520a6-5d6b-491c-9b64-1d8742a59c83 {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 693.192384] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-e897cf5c-ee00-428e-a39d-8e532a5bd486 req-a0254b49-a539-40bd-ab93-b18c3c930b52 service nova] Expecting reply to msg 854e73e0f5fc4b6091bd04c10a79dab3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 693.199830] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 854e73e0f5fc4b6091bd04c10a79dab3 [ 693.560929] env[62740]: DEBUG nova.network.neutron [req-e897cf5c-ee00-428e-a39d-8e532a5bd486 req-a0254b49-a539-40bd-ab93-b18c3c930b52 service nova] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Updated VIF entry in instance network info cache for port 586520a6-5d6b-491c-9b64-1d8742a59c83. 
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 693.561314] env[62740]: DEBUG nova.network.neutron [req-e897cf5c-ee00-428e-a39d-8e532a5bd486 req-a0254b49-a539-40bd-ab93-b18c3c930b52 service nova] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Updating instance_info_cache with network_info: [{"id": "586520a6-5d6b-491c-9b64-1d8742a59c83", "address": "fa:16:3e:dd:b8:4c", "network": {"id": "304597cd-4bd9-403c-8f5b-990e2a1efabc", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "47f0062f3bf04910bbbb3502a2f3ff28", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap586520a6-5d", "ovs_interfaceid": "586520a6-5d6b-491c-9b64-1d8742a59c83", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.561837] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-e897cf5c-ee00-428e-a39d-8e532a5bd486 req-a0254b49-a539-40bd-ab93-b18c3c930b52 service nova] Expecting reply to msg 232b8da7128f498090d39b688243004c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 693.570393] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 232b8da7128f498090d39b688243004c [ 693.571014] env[62740]: DEBUG oslo_concurrency.lockutils [req-e897cf5c-ee00-428e-a39d-8e532a5bd486 req-a0254b49-a539-40bd-ab93-b18c3c930b52 service nova] Releasing lock "refresh_cache-149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 696.102993] env[62740]: WARNING oslo_vmware.rw_handles [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 696.102993] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 696.102993] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 696.102993] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 696.102993] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 696.102993] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 696.102993] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 696.102993] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 696.102993] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 696.102993] env[62740]: ERROR 
oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 696.102993] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 696.102993] env[62740]: ERROR oslo_vmware.rw_handles [ 696.103591] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/9fa89c00-b565-4459-bacd-e9e13746e171/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore1 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 696.104689] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 696.104935] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Copying Virtual Disk [datastore1] vmware_temp/9fa89c00-b565-4459-bacd-e9e13746e171/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore1] vmware_temp/9fa89c00-b565-4459-bacd-e9e13746e171/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 696.105232] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5665bfae-c0ae-4e8b-a103-6423f93b44da {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.113556] env[62740]: DEBUG oslo_vmware.api [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Waiting for the task: (returnval){ [ 696.113556] env[62740]: value = "task-640088" [ 696.113556] env[62740]: _type = "Task" [ 696.113556] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.121716] env[62740]: DEBUG oslo_vmware.api [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Task: {'id': task-640088, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.624193] env[62740]: DEBUG oslo_vmware.exceptions [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Fault InvalidArgument not matched. 
{{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 696.624484] env[62740]: DEBUG oslo_concurrency.lockutils [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Releasing lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 696.625043] env[62740]: ERROR nova.compute.manager [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 696.625043] env[62740]: Faults: ['InvalidArgument'] [ 696.625043] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Traceback (most recent call last): [ 696.625043] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 696.625043] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] yield resources [ 696.625043] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 696.625043] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] self.driver.spawn(context, instance, image_meta, [ 696.625043] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 696.625043] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] self._vmops.spawn(context, instance, image_meta, injected_files, [ 696.625043] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 696.625043] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] self._fetch_image_if_missing(context, vi) [ 696.625043] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 696.625379] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] image_cache(vi, tmp_image_ds_loc) [ 696.625379] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 696.625379] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] vm_util.copy_virtual_disk( [ 696.625379] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 696.625379] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] session._wait_for_task(vmdk_copy_task) [ 696.625379] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 696.625379] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] return self.wait_for_task(task_ref) [ 696.625379] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 696.625379] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] return evt.wait() [ 696.625379] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 696.625379] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] result = hub.switch() [ 696.625379] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 696.625379] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] return self.greenlet.switch() [ 696.625725] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 696.625725] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] self.f(*self.args, **self.kw) [ 696.625725] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 696.625725] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] raise exceptions.translate_fault(task_info.error) [ 696.625725] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 696.625725] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Faults: ['InvalidArgument'] [ 696.625725] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] [ 696.625725] env[62740]: INFO nova.compute.manager [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Terminating instance [ 696.626941] env[62740]: DEBUG oslo_concurrency.lockutils [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Acquired lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 696.627167] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 696.627778] env[62740]: DEBUG nova.compute.manager [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 
tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 696.627963] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 696.628266] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ad0f5b52-f02c-436e-8866-575dfcd2dde7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.630802] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b13c9095-5e1d-4550-a3da-83869fe01b13 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.637936] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 696.638158] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6c6bdf35-ae66-4ee5-89d2-d70510ccc60e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.640643] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 696.640833] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 696.643092] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b17e7ca-caa0-4c8e-86da-2f2f129e54b3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.649523] env[62740]: DEBUG oslo_vmware.api [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Waiting for the task: (returnval){ [ 696.649523] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52c140bb-7c80-837d-c329-4133e748829b" [ 696.649523] env[62740]: _type = "Task" [ 696.649523] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.662436] env[62740]: DEBUG oslo_vmware.api [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52c140bb-7c80-837d-c329-4133e748829b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.722653] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 696.722883] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Deleting contents of the VM from datastore datastore1 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 696.723086] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Deleting the datastore file [datastore1] 4f89b342-0375-48f5-b5cf-713a8d57a182 {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 696.723356] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-18c87cf3-1a26-4554-8129-69f75a5fdaf3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.729336] env[62740]: DEBUG oslo_vmware.api [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Waiting for the task: (returnval){ [ 696.729336] env[62740]: value = "task-640090" [ 696.729336] env[62740]: _type = "Task" [ 696.729336] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.737384] env[62740]: DEBUG oslo_vmware.api [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Task: {'id': task-640090, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.159422] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 697.159702] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Creating directory with path [datastore1] vmware_temp/93aa040f-c4a6-426a-83e9-98d183992c0b/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 697.159932] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1598c7c9-a3c2-433e-be78-d9c85caa984e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.171807] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Created directory with path [datastore1] vmware_temp/93aa040f-c4a6-426a-83e9-98d183992c0b/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 697.172011] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Fetch image to [datastore1] vmware_temp/93aa040f-c4a6-426a-83e9-98d183992c0b/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 697.172194] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore1] vmware_temp/93aa040f-c4a6-426a-83e9-98d183992c0b/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore1 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 697.172939] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56a48a8b-8036-4f0b-9519-0f40838195b6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.179685] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dcfb6d1-df2d-4adf-bd68-3b638789060f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.188746] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13afe7f6-0e9d-4a88-91a9-76fcf1aa26a7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.219706] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c100ef57-f82e-4606-a4f8-3cef770ea1cd {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.225798] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6a5b82de-cc1f-48e3-bd0d-84349a90d45b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.237556] env[62740]: DEBUG oslo_vmware.api [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Task: {'id': task-640090, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076329} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.237789] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 697.237964] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Deleted contents of the VM from datastore datastore1 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 697.238510] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 697.238510] env[62740]: INFO nova.compute.manager [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Took 0.61 seconds to destroy the instance on the hypervisor. 
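
The destroy sequence recorded above (VirtualMachine.UnregisterVM, then a FileManager.DeleteDatastoreFile_Task polled via wait_for_task until "completed successfully") is the standard oslo.vmware invoke-and-wait pattern. The sketch below is a minimal illustration of that pattern, not Nova's actual ds_util/vmops code; it assumes an already-authenticated oslo_vmware.api.VMwareAPISession plus pre-resolved vm_ref/dc_ref managed-object references, and the function name is invented for illustration.

    # session: an authenticated oslo_vmware.api.VMwareAPISession
    # vm_ref / dc_ref: VirtualMachine and Datacenter managed-object refs
    def unregister_and_delete(session, vm_ref, dc_ref, ds_path):
        # UnregisterVM drops the VM from the vCenter inventory but leaves
        # its files on the datastore ("Unregistered the VM" above).
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)
        # DeleteDatastoreFile_Task removes the backing files, e.g.
        # "[datastore1] 4f89b342-...", and returns a task reference.
        file_mgr = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_mgr, name=ds_path, datacenter=dc_ref)
        # wait_for_task drives the polling loop behind the
        # "Task: {'id': task-640090, ...} progress is 0%" entries and
        # raises a translated fault if the task errors out.
        session.wait_for_task(task)
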
[ 697.240500] env[62740]: DEBUG nova.compute.claims [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 697.240674] env[62740]: DEBUG oslo_concurrency.lockutils [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 697.240881] env[62740]: DEBUG oslo_concurrency.lockutils [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 697.242761] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Expecting reply to msg dcd3479866f64955ae1f4205d4e8c062 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 697.249702] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore1 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 697.282628] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dcd3479866f64955ae1f4205d4e8c062 [ 697.309103] env[62740]: DEBUG oslo_vmware.rw_handles [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/93aa040f-c4a6-426a-83e9-98d183992c0b/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 697.384831] env[62740]: DEBUG oslo_vmware.rw_handles [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Completed reading data from the image iterator. {{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 697.385122] env[62740]: DEBUG oslo_vmware.rw_handles [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/93aa040f-c4a6-426a-83e9-98d183992c0b/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 697.709096] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34248010-8b31-450a-8d5a-e52afbecb6ca {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.716623] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ea7a7cc-59c5-4ecd-ad6b-2c90a1c43d72 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.745389] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81d6d67a-c00a-4d29-809f-bd589fb2c7cd {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.752924] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57ef6e27-6f7d-4bc4-affc-7d28c95878e3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.766015] env[62740]: DEBUG nova.compute.provider_tree [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 697.766451] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Expecting reply to msg ab6c044359a24f24aac987759f39a9db in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 697.773693] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ab6c044359a24f24aac987759f39a9db [ 697.774642] env[62740]: DEBUG nova.scheduler.client.report [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 697.777154] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Expecting reply to msg 82d4fba400f84428bd8ba0a0aa2547d9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 697.789354] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 82d4fba400f84428bd8ba0a0aa2547d9 [ 697.790068] env[62740]: DEBUG oslo_concurrency.lockutils [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.549s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 697.790626] env[62740]: ERROR nova.compute.manager [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 697.790626] env[62740]: Faults: ['InvalidArgument'] [ 697.790626] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Traceback (most recent call last): [ 697.790626] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 697.790626] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] self.driver.spawn(context, instance, image_meta, [ 697.790626] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 697.790626] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] self._vmops.spawn(context, instance, image_meta, injected_files, [ 697.790626] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 697.790626] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] self._fetch_image_if_missing(context, vi) [ 697.790626] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 697.790626] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] image_cache(vi, tmp_image_ds_loc) [ 697.790626] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 697.791014] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] vm_util.copy_virtual_disk( [ 697.791014] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 697.791014] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] session._wait_for_task(vmdk_copy_task) [ 697.791014] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 697.791014] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] return self.wait_for_task(task_ref) [ 697.791014] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 697.791014] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] return evt.wait() [ 697.791014] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 697.791014] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] result = hub.switch() [ 697.791014] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 697.791014] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] return self.greenlet.switch() [ 697.791014] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 697.791014] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] self.f(*self.args, **self.kw) [ 697.791364] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 697.791364] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] raise exceptions.translate_fault(task_info.error) [ 697.791364] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 697.791364] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Faults: ['InvalidArgument'] [ 697.791364] env[62740]: ERROR nova.compute.manager [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] [ 697.791364] env[62740]: DEBUG nova.compute.utils [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 697.792863] env[62740]: DEBUG nova.compute.manager [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Build of instance 4f89b342-0375-48f5-b5cf-713a8d57a182 was re-scheduled: A specified parameter was not correct: fileType [ 697.792863] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 697.793072] env[62740]: DEBUG nova.compute.manager [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 697.793258] env[62740]: DEBUG nova.compute.manager [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 697.793433] env[62740]: DEBUG nova.compute.manager [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 697.793601] env[62740]: DEBUG nova.network.neutron [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 698.202830] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Expecting reply to msg cfb9e1a469cd47f99b1c2f6be4eaa498 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 698.214046] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cfb9e1a469cd47f99b1c2f6be4eaa498 [ 698.214596] env[62740]: DEBUG nova.network.neutron [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.215070] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Expecting reply to msg ab78f44ad70d48c8ad1d941feda06975 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 698.228595] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ab78f44ad70d48c8ad1d941feda06975 [ 698.229242] env[62740]: INFO nova.compute.manager [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] [instance: 4f89b342-0375-48f5-b5cf-713a8d57a182] Took 0.44 seconds to deallocate network for instance. 
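
Taken together, the entries since the CopyVirtualDisk failure trace one cleanup path: spawn raises VimFaultException, the resource claim is aborted under the "compute_resources" lock, the build is marked for re-scheduling, and the instance's network is deallocated. The following is a condensed, self-contained sketch of that control flow under assumed names; it is an illustration of the pattern, not Nova's actual _build_and_run_instance.

    class RescheduledException(Exception):
        """Signals that the build should be retried, possibly elsewhere."""

    def build_and_run_instance(instance, driver, claim, network_api):
        try:
            # This call raised "A specified parameter was not correct:
            # fileType / Faults: ['InvalidArgument']" in the log above.
            driver.spawn(instance)
        except Exception as exc:
            # Release the claimed VCPU/MEMORY_MB/DISK_GB, mirroring the
            # abort_instance_claim lock acquire/release entries.
            claim.abort()
            # Drop the instance's ports and reset its info cache to [],
            # mirroring "Deallocating network for instance".
            network_api.deallocate_for_instance(instance)
            # Hand the request back for re-scheduling; the scheduler
            # report client then deletes this host's allocations.
            raise RescheduledException(str(exc)) from exc
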
[ 698.230876] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Expecting reply to msg c6cd1dcf059a479b9f02c18fe96c6245 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 698.266697] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c6cd1dcf059a479b9f02c18fe96c6245 [ 698.269374] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Expecting reply to msg 97b9afc59d1644b0a3f94b9bb4dc3746 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 698.301199] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 97b9afc59d1644b0a3f94b9bb4dc3746 [ 698.325768] env[62740]: INFO nova.scheduler.client.report [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Deleted allocations for instance 4f89b342-0375-48f5-b5cf-713a8d57a182 [ 698.333077] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Expecting reply to msg 29bdd5171d3a4908814ca5bada24c4da in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 698.346112] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 29bdd5171d3a4908814ca5bada24c4da [ 698.346810] env[62740]: DEBUG oslo_concurrency.lockutils [None req-174ba3be-3ce5-4d3e-aae2-bd2a69fce88a tempest-ServerMetadataNegativeTestJSON-2119363439 tempest-ServerMetadataNegativeTestJSON-2119363439-project-member] Lock "4f89b342-0375-48f5-b5cf-713a8d57a182" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 86.492s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 698.348029] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Expecting reply to msg 0efc230699f64cfea121e5dcab7746ab in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 698.356851] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0efc230699f64cfea121e5dcab7746ab [ 698.357329] env[62740]: DEBUG nova.compute.manager [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Starting instance... 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 698.359183] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Expecting reply to msg 485497650055443ca882f8e3c01b4c2e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 698.397542] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 485497650055443ca882f8e3c01b4c2e [ 698.413602] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 698.413861] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 698.415506] env[62740]: INFO nova.compute.claims [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 698.417497] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Expecting reply to msg 88c44b4e7a3647d7848f5b3e02a6c3a7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 698.449496] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 88c44b4e7a3647d7848f5b3e02a6c3a7 [ 698.451255] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Expecting reply to msg 998db72ff5aa4ee796109589c86fc619 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 698.458042] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 998db72ff5aa4ee796109589c86fc619 [ 698.815920] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dcdcb8b-84a1-4bb7-8b72-a855e51da2ba {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.824382] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdae7778-ef01-4a53-9ecb-443e4f316014 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.856527] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c76d860-7117-4560-a327-8c93fc2e934f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.864386] env[62740]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b8b353d-c75c-46fb-9ec1-d08cf7453a6f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.878747] env[62740]: DEBUG nova.compute.provider_tree [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 698.879561] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Expecting reply to msg 71a45b6d52714e7a8e7cd9cbc5943fbe in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 698.887110] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 71a45b6d52714e7a8e7cd9cbc5943fbe [ 698.888151] env[62740]: DEBUG nova.scheduler.client.report [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 698.890925] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Expecting reply to msg 71521ccf4e9e4a16b992843a68999046 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 698.906016] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 71521ccf4e9e4a16b992843a68999046 [ 698.906016] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.490s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 698.906016] env[62740]: DEBUG nova.compute.manager [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Start building networks asynchronously for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 698.906670] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Expecting reply to msg 32584f2ffab744e59a235aa41e4fd9c9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 698.943020] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 32584f2ffab744e59a235aa41e4fd9c9 [ 698.943020] env[62740]: DEBUG nova.compute.utils [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 698.943020] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Expecting reply to msg cda67a753ca0432fa5f4d41aa1e08f01 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 698.943710] env[62740]: DEBUG nova.compute.manager [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 698.943976] env[62740]: DEBUG nova.network.neutron [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 698.952474] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cda67a753ca0432fa5f4d41aa1e08f01 [ 698.953045] env[62740]: DEBUG nova.compute.manager [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 698.954735] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Expecting reply to msg 2110d9862720410eb16e336a8ec342d5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 698.984539] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2110d9862720410eb16e336a8ec342d5 [ 698.987395] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Expecting reply to msg c2410d2b0407451cb0ec940877631858 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 699.018031] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c2410d2b0407451cb0ec940877631858 [ 699.019418] env[62740]: DEBUG nova.compute.manager [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Start spawning the instance on the hypervisor. {{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 699.043422] env[62740]: DEBUG nova.policy [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '89b62264883e4f1485cc94c9222b4392', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd11a0618f9a8492c96e5db989a4118d1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 699.048371] env[62740]: DEBUG nova.virt.hardware [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 699.048371] env[62740]: DEBUG nova.virt.hardware [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:348}} [ 699.048371] env[62740]: DEBUG nova.virt.hardware [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 699.048557] env[62740]: DEBUG nova.virt.hardware [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 699.048651] env[62740]: DEBUG nova.virt.hardware [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 699.048821] env[62740]: DEBUG nova.virt.hardware [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 699.049109] env[62740]: DEBUG nova.virt.hardware [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 699.049383] env[62740]: DEBUG nova.virt.hardware [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 699.051170] env[62740]: DEBUG nova.virt.hardware [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 699.051170] env[62740]: DEBUG nova.virt.hardware [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 699.051170] env[62740]: DEBUG nova.virt.hardware [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 699.051170] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6e51101-7318-4fda-9c0a-63ea9817234e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.059408] env[62740]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14e1e012-3c2c-404b-bc74-b2c34b8007bf {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.558140] env[62740]: DEBUG nova.network.neutron [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Successfully created port: 028d2815-af45-491c-b4da-f9af9036bc4a {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 700.800020] env[62740]: DEBUG nova.network.neutron [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Successfully updated port: 028d2815-af45-491c-b4da-f9af9036bc4a {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 700.800020] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Expecting reply to msg e8a26fb96d2b4684ba899915a656e070 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 700.813099] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e8a26fb96d2b4684ba899915a656e070 [ 700.813844] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Acquiring lock "refresh_cache-fedb62e0-2602-4772-9e5d-00645922d2a8" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 700.813989] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Acquired lock "refresh_cache-fedb62e0-2602-4772-9e5d-00645922d2a8" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 700.814166] env[62740]: DEBUG nova.network.neutron [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 700.814904] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Expecting reply to msg db2a057793634e23822bd040ea1a082b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 700.824768] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg db2a057793634e23822bd040ea1a082b [ 700.879736] env[62740]: DEBUG nova.network.neutron [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 701.057377] env[62740]: DEBUG nova.compute.manager [req-9a320875-444d-4420-965b-8bc67dc970bf req-1ef76495-4ad0-4c20-9fad-8755d6c59fd6 service nova] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Received event network-vif-plugged-028d2815-af45-491c-b4da-f9af9036bc4a {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 701.057991] env[62740]: DEBUG oslo_concurrency.lockutils [req-9a320875-444d-4420-965b-8bc67dc970bf req-1ef76495-4ad0-4c20-9fad-8755d6c59fd6 service nova] Acquiring lock "fedb62e0-2602-4772-9e5d-00645922d2a8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 701.058338] env[62740]: DEBUG oslo_concurrency.lockutils [req-9a320875-444d-4420-965b-8bc67dc970bf req-1ef76495-4ad0-4c20-9fad-8755d6c59fd6 service nova] Lock "fedb62e0-2602-4772-9e5d-00645922d2a8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 701.058535] env[62740]: DEBUG oslo_concurrency.lockutils [req-9a320875-444d-4420-965b-8bc67dc970bf req-1ef76495-4ad0-4c20-9fad-8755d6c59fd6 service nova] Lock "fedb62e0-2602-4772-9e5d-00645922d2a8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 701.058752] env[62740]: DEBUG nova.compute.manager [req-9a320875-444d-4420-965b-8bc67dc970bf req-1ef76495-4ad0-4c20-9fad-8755d6c59fd6 service nova] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] No waiting events found dispatching network-vif-plugged-028d2815-af45-491c-b4da-f9af9036bc4a {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 701.058874] env[62740]: WARNING nova.compute.manager [req-9a320875-444d-4420-965b-8bc67dc970bf req-1ef76495-4ad0-4c20-9fad-8755d6c59fd6 service nova] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Received unexpected event network-vif-plugged-028d2815-af45-491c-b4da-f9af9036bc4a for instance with vm_state building and task_state spawning. 
[ 701.138684] env[62740]: DEBUG nova.network.neutron [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Updating instance_info_cache with network_info: [{"id": "028d2815-af45-491c-b4da-f9af9036bc4a", "address": "fa:16:3e:90:b7:7a", "network": {"id": "4e16caae-0c08-42f9-ab6a-328f99c87ec5", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1780388540-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d11a0618f9a8492c96e5db989a4118d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "778b9a40-d603-4765-ac88-bd6d42c457a2", "external-id": "nsx-vlan-transportzone-114", "segmentation_id": 114, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap028d2815-af", "ovs_interfaceid": "028d2815-af45-491c-b4da-f9af9036bc4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 701.139260] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Expecting reply to msg 27bb8ec470c54a65b016269e277a0575 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 701.152634] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 27bb8ec470c54a65b016269e277a0575 [ 701.153299] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Releasing lock "refresh_cache-fedb62e0-2602-4772-9e5d-00645922d2a8" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 701.153586] env[62740]: DEBUG nova.compute.manager [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Instance network_info: |[{"id": "028d2815-af45-491c-b4da-f9af9036bc4a", "address": "fa:16:3e:90:b7:7a", "network": {"id": "4e16caae-0c08-42f9-ab6a-328f99c87ec5", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1780388540-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d11a0618f9a8492c96e5db989a4118d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"778b9a40-d603-4765-ac88-bd6d42c457a2", "external-id": "nsx-vlan-transportzone-114", "segmentation_id": 114, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap028d2815-af", "ovs_interfaceid": "028d2815-af45-491c-b4da-f9af9036bc4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 701.153992] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:90:b7:7a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '778b9a40-d603-4765-ac88-bd6d42c457a2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '028d2815-af45-491c-b4da-f9af9036bc4a', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 701.162424] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Creating folder: Project (d11a0618f9a8492c96e5db989a4118d1). Parent ref: group-v156037. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 701.163057] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b07b5a81-302d-4e71-98a4-97b80372ca72 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.175178] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Created folder: Project (d11a0618f9a8492c96e5db989a4118d1) in parent group-v156037. [ 701.175395] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Creating folder: Instances. Parent ref: group-v156078. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 701.175643] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f49c571c-3dcf-40c4-8941-7f79da35f073 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.184883] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Created folder: Instances in parent group-v156078. [ 701.185132] env[62740]: DEBUG oslo.service.loopingcall [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 701.185206] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 701.185391] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-46bc50bd-ca0d-4417-a643-4a60a69a8c42 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.205318] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 701.205318] env[62740]: value = "task-640093" [ 701.205318] env[62740]: _type = "Task" [ 701.205318] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.213597] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640093, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.716512] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640093, 'name': CreateVM_Task} progress is 99%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.891584] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 701.891584] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Starting heal instance info cache {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 701.891584] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Rebuilding the list of instances to heal {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 701.892160] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg d495862bc459498f93f0af3a0691ff43 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 701.913916] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d495862bc459498f93f0af3a0691ff43 [ 701.916475] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 701.916630] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 701.916764] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: fece072a-baac-4301-988c-0068d6e71cff] Skipping network cache update for instance because it is Building. 
{{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 701.916894] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 701.917031] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 701.917162] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 701.917287] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 701.917453] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 701.917519] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 701.917637] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 701.917757] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Didn't find any instances for network info cache update. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 702.216306] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640093, 'name': CreateVM_Task} progress is 99%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.716266] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640093, 'name': CreateVM_Task, 'duration_secs': 1.297168} completed successfully. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.716443] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 702.717120] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 702.717291] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Acquired lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 702.717612] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 702.717857] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7572c99c-19fd-4b8d-bec2-c046666386a1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.722230] env[62740]: DEBUG oslo_vmware.api [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Waiting for the task: (returnval){ [ 702.722230] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52719e94-6c43-ab1b-f62b-795b6920e8e9" [ 702.722230] env[62740]: _type = "Task" [ 702.722230] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.729846] env[62740]: DEBUG oslo_vmware.api [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52719e94-6c43-ab1b-f62b-795b6920e8e9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.890617] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 702.890871] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 702.891046] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 702.891211] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 702.891363] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 702.891513] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62740) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 702.891648] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager.update_available_resource {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 702.892016] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 74bdc9fe450b441c98bbc8dd545692a7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 702.902459] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 74bdc9fe450b441c98bbc8dd545692a7 [ 702.903508] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 702.903725] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 702.903894] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 702.904060] env[62740]: 
DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62740) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 702.905178] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f9ac3a3-e47e-4d1a-9cf3-4b0b68b2f8b3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.914653] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5594855e-ade3-4d33-81da-5434bb5cf12b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.930780] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80469f3a-83a0-49e1-8e6d-f1bb73ba5ff7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.937899] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68edad22-ac63-482d-a3aa-235373cb4210 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.966855] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181684MB free_disk=90GB free_vcpus=48 pci_devices=None {{(pid=62740) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 702.967028] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 702.967235] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 702.968134] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 75e75b66e5074d13a508728a22feebc6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 703.006892] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 75e75b66e5074d13a508728a22feebc6 [ 703.011032] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 5a838aa11bd04678b0be61cb3d571c00 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 703.026056] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5a838aa11bd04678b0be61cb3d571c00 [ 703.054988] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 9d175573-2af2-4f66-98cd-411d10f749f0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 703.055177] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance b326be2c-43f2-4f04-9652-cec7e017288e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 703.055309] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance fece072a-baac-4301-988c-0068d6e71cff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 703.055433] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 3102cc87-df1a-4de8-bfdb-9b904f40ea2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 703.055610] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 4f0d1356-bdfb-4cb2-979a-e28f9025b311 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 703.055896] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 75050b95-60c6-4e44-a1d5-0d47492dd739 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 703.055896] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 8053d2ae-ca61-4282-aa89-83f3a2e107bc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 703.056017] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance a24df1e4-2865-4ab3-beae-0892dca12bef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 703.056131] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 703.056253] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance fedb62e0-2602-4772-9e5d-00645922d2a8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 703.056811] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 5ba6dfbe3bf14020913aa0649f55ce6d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 703.068748] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ba6dfbe3bf14020913aa0649f55ce6d [ 703.069582] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance f98589dc-ea7a-44c8-8cca-119d126ea0de has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.070098] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 7e754b087e864bb79f8beb0c3d1f270f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 703.081965] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7e754b087e864bb79f8beb0c3d1f270f [ 703.082640] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance fa5248d1-bddf-4244-a363-2113b0473980 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.083164] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg b2f3999205ec4e45bf2ca9ade760e71a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 703.096591] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b2f3999205ec4e45bf2ca9ade760e71a [ 703.097400] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 6ec38a6c-f4b2-42ce-b371-5fe82d577545 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.097861] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 54074a25b48e4e66b6616e6f8d07b199 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 703.109242] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 54074a25b48e4e66b6616e6f8d07b199 [ 703.110023] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 26712c18-d9f4-4d7d-80fb-4d527da9c1e3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.110572] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 630640d1b48840848275938037076cd9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 703.121808] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 630640d1b48840848275938037076cd9 [ 703.122610] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance ab15259f-6344-4ba0-9abd-8b0ee7df59fa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.123164] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 58bf8921baf04764b3febcfa36157e47 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 703.146064] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 58bf8921baf04764b3febcfa36157e47 [ 703.147537] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 425930c0-b9f8-4966-ae9d-0687d0a07213 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.148791] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg f37181dd0b064e8e88d1a36f70077fa7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 703.165798] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f37181dd0b064e8e88d1a36f70077fa7 [ 703.166613] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance b2ec3212-25e1-4027-801d-a23309a4d0e6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.167137] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 40e2f7f5392e4c068d92388caaa2356e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 703.178581] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 40e2f7f5392e4c068d92388caaa2356e [ 703.179443] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 1ae43e6d-c9ac-494d-a7a9-1f6ff538345a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.179983] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 820c1f35dc2c4183ad6d7435adb66121 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 703.189732] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 820c1f35dc2c4183ad6d7435adb66121 [ 703.190436] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance d490ad64-a2fe-4c08-b0fc-56b2e00d9c98 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.190962] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg ecce83e564c84b3e9b37e730422cf533 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 703.205018] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ecce83e564c84b3e9b37e730422cf533 [ 703.210262] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance f7a74539-8a36-450f-aec4-d059670e8f38 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.210262] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 362df8ac7c4f4e808a81796f1740e7e3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 703.223202] env[62740]: DEBUG nova.compute.manager [req-103abd21-0141-42b5-9fbe-77a784d7ce49 req-c2823f8d-930b-46f9-a7d8-b6613f79374a service nova] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Received event network-changed-028d2815-af45-491c-b4da-f9af9036bc4a {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 703.223202] env[62740]: DEBUG nova.compute.manager [req-103abd21-0141-42b5-9fbe-77a784d7ce49 req-c2823f8d-930b-46f9-a7d8-b6613f79374a service nova] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Refreshing instance network info cache due to event network-changed-028d2815-af45-491c-b4da-f9af9036bc4a. {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 703.223202] env[62740]: DEBUG oslo_concurrency.lockutils [req-103abd21-0141-42b5-9fbe-77a784d7ce49 req-c2823f8d-930b-46f9-a7d8-b6613f79374a service nova] Acquiring lock "refresh_cache-fedb62e0-2602-4772-9e5d-00645922d2a8" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 703.223202] env[62740]: DEBUG oslo_concurrency.lockutils [req-103abd21-0141-42b5-9fbe-77a784d7ce49 req-c2823f8d-930b-46f9-a7d8-b6613f79374a service nova] Acquired lock "refresh_cache-fedb62e0-2602-4772-9e5d-00645922d2a8" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 703.223202] env[62740]: DEBUG nova.network.neutron [req-103abd21-0141-42b5-9fbe-77a784d7ce49 req-c2823f8d-930b-46f9-a7d8-b6613f79374a service nova] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Refreshing network info cache for port 028d2815-af45-491c-b4da-f9af9036bc4a {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 703.223736] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-103abd21-0141-42b5-9fbe-77a784d7ce49 req-c2823f8d-930b-46f9-a7d8-b6613f79374a service nova] Expecting reply to msg 05ac924e683a4b83bd781c48c398dbaf in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 703.224942] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 362df8ac7c4f4e808a81796f1740e7e3 [ 703.226021] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance e473e254-387f-4581-97bc-bdeab221b10f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.226699] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 7c523fdf7a2f4302a7bc6782bf8ad6a2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 703.233535] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 05ac924e683a4b83bd781c48c398dbaf [ 703.243811] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Releasing lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 703.244174] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 703.244484] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 703.260271] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c523fdf7a2f4302a7bc6782bf8ad6a2 [ 703.260271] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 68aa9321-22ce-45a0-8323-fa8564dca46b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.260271] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 1dde87a646d64674a64ae1344b621e09 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 703.272216] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1dde87a646d64674a64ae1344b621e09 [ 703.274823] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 4ec9a397-1e4b-4767-b926-ccc6f63a951c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.274823] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 42b771448ab84900808525805005455a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 703.284435] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 42b771448ab84900808525805005455a [ 703.285881] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance e1c7a748-b3f3-41b7-8784-13699549a01d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.286836] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg e5fb33c00ef94297ac605b92a4ed22f0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 703.300124] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e5fb33c00ef94297ac605b92a4ed22f0 [ 703.300124] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 25cef75e-2176-4999-965b-155cd7f8d137 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.300124] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg bf2903d3f6d54107ac155ddc1235e6f3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 703.310941] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bf2903d3f6d54107ac155ddc1235e6f3 [ 703.310941] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 0e5caaed-20ff-40bd-b0cf-016ac18642cf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.310941] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 0fbd0f92e2ec48528a28d0428df11a2d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 703.322505] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0fbd0f92e2ec48528a28d0428df11a2d [ 703.323295] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance b9840eca-ec5f-4a8c-9bdf-1212e2640e5c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.323910] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg b591fdc15c574d49b70956e243631cae in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 703.333035] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b591fdc15c574d49b70956e243631cae [ 703.333748] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance ec703551-5c8d-43bb-b727-709aeeeac9a4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.334239] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 377bcbd625794d00817e731b59bb8157 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 703.347295] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 377bcbd625794d00817e731b59bb8157 [ 703.347971] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 00085df9-ce61-4ccc-8ecf-16956109eb8f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.348521] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 48aa36b802794080afbd519a9921b8bc in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 703.360999] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 48aa36b802794080afbd519a9921b8bc [ 703.361716] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance e5b0daa1-6745-48ad-8e69-6c7362bac085 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.362235] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 4f5916f72d1b40c4938fba26b1e89cff in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 703.374518] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4f5916f72d1b40c4938fba26b1e89cff [ 703.375450] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance e8bb18d5-207c-48fb-b7e5-06b72ce61b4f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.375694] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 703.375845] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 703.775367] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9047e0ed-af95-4862-b541-98a9a7fef65f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.783827] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b0b98e1-2399-4ca6-b5b9-e96d4692daa9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.816103] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d10b6a16-044a-4f30-bbd7-d6e291e5d031 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.823531] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-197613b6-7891-4a77-a76f-d9256a4f24e5 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.836841] env[62740]: DEBUG nova.compute.provider_tree [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 703.837348] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 6e5b76b1c7fb4d5e89de71f55c3a8af9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 703.848986] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6e5b76b1c7fb4d5e89de71f55c3a8af9 [ 703.849924] env[62740]: DEBUG nova.scheduler.client.report [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 703.852211] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 216a0bde0cf04b03ace454499318ad18 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 703.863424] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 216a0bde0cf04b03ace454499318ad18 [ 703.864066] 
env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62740) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 703.864244] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.897s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 704.004433] env[62740]: DEBUG nova.network.neutron [req-103abd21-0141-42b5-9fbe-77a784d7ce49 req-c2823f8d-930b-46f9-a7d8-b6613f79374a service nova] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Updated VIF entry in instance network info cache for port 028d2815-af45-491c-b4da-f9af9036bc4a. {{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 704.004433] env[62740]: DEBUG nova.network.neutron [req-103abd21-0141-42b5-9fbe-77a784d7ce49 req-c2823f8d-930b-46f9-a7d8-b6613f79374a service nova] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Updating instance_info_cache with network_info: [{"id": "028d2815-af45-491c-b4da-f9af9036bc4a", "address": "fa:16:3e:90:b7:7a", "network": {"id": "4e16caae-0c08-42f9-ab6a-328f99c87ec5", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1780388540-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d11a0618f9a8492c96e5db989a4118d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "778b9a40-d603-4765-ac88-bd6d42c457a2", "external-id": "nsx-vlan-transportzone-114", "segmentation_id": 114, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap028d2815-af", "ovs_interfaceid": "028d2815-af45-491c-b4da-f9af9036bc4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 704.004999] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-103abd21-0141-42b5-9fbe-77a784d7ce49 req-c2823f8d-930b-46f9-a7d8-b6613f79374a service nova] Expecting reply to msg 3ee19d8f73024fdc87de719a2b435a09 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 704.011453] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ee19d8f73024fdc87de719a2b435a09 [ 704.011453] env[62740]: DEBUG oslo_concurrency.lockutils [req-103abd21-0141-42b5-9fbe-77a784d7ce49 req-c2823f8d-930b-46f9-a7d8-b6613f79374a service nova] Releasing lock "refresh_cache-fedb62e0-2602-4772-9e5d-00645922d2a8" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 704.863751] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62740) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 704.890590] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 707.034349] env[62740]: DEBUG oslo_concurrency.lockutils [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Acquiring lock "697e3884-2ef4-423e-af81-e5d1e94f65a2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 707.034657] env[62740]: DEBUG oslo_concurrency.lockutils [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Lock "697e3884-2ef4-423e-af81-e5d1e94f65a2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 734.335445] env[62740]: WARNING oslo_vmware.rw_handles [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 734.335445] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 734.335445] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 734.335445] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 734.335445] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 734.335445] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 734.335445] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 734.335445] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 734.335445] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 734.335445] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 734.335445] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 734.335445] env[62740]: ERROR oslo_vmware.rw_handles [ 734.336247] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/29703b3d-5009-405a-b4e8-274fcdf36d31/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 734.337811] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 
tempest-ServerActionsTestOtherB-1591768249-project-member] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 734.338127] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Copying Virtual Disk [datastore2] vmware_temp/29703b3d-5009-405a-b4e8-274fcdf36d31/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore2] vmware_temp/29703b3d-5009-405a-b4e8-274fcdf36d31/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 734.338510] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-00bce054-968b-4549-ba8c-4e8cec6a3c42 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.348502] env[62740]: DEBUG oslo_vmware.api [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Waiting for the task: (returnval){ [ 734.348502] env[62740]: value = "task-640094" [ 734.348502] env[62740]: _type = "Task" [ 734.348502] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.356087] env[62740]: DEBUG oslo_vmware.api [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Task: {'id': task-640094, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.858408] env[62740]: DEBUG oslo_vmware.exceptions [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Fault InvalidArgument not matched. 
{{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 734.858703] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 734.859294] env[62740]: ERROR nova.compute.manager [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 734.859294] env[62740]: Faults: ['InvalidArgument'] [ 734.859294] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Traceback (most recent call last): [ 734.859294] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 734.859294] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] yield resources [ 734.859294] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 734.859294] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] self.driver.spawn(context, instance, image_meta, [ 734.859294] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 734.859294] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 734.859294] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 734.859294] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] self._fetch_image_if_missing(context, vi) [ 734.859294] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 734.859609] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] image_cache(vi, tmp_image_ds_loc) [ 734.859609] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 734.859609] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] vm_util.copy_virtual_disk( [ 734.859609] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 734.859609] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] session._wait_for_task(vmdk_copy_task) [ 734.859609] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 
157, in _wait_for_task [ 734.859609] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] return self.wait_for_task(task_ref) [ 734.859609] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 734.859609] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] return evt.wait() [ 734.859609] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 734.859609] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] result = hub.switch() [ 734.859609] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 734.859609] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] return self.greenlet.switch() [ 734.859898] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 734.859898] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] self.f(*self.args, **self.kw) [ 734.859898] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 734.859898] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] raise exceptions.translate_fault(task_info.error) [ 734.859898] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 734.859898] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Faults: ['InvalidArgument'] [ 734.859898] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] [ 734.859898] env[62740]: INFO nova.compute.manager [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Terminating instance [ 734.861229] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.861443] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 734.862598] env[62740]: DEBUG nova.compute.manager [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] [instance: 
9d175573-2af2-4f66-98cd-411d10f749f0] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 734.862794] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 734.863046] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c920f74b-69b4-4abb-98a0-f1af412f73e2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.865375] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d8eb742-a26d-46f0-a45d-bfbf93c650ef {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.872251] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 734.872522] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-150a94f9-67d0-4d90-a77e-bcec17f05a33 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.874773] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 734.874951] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 734.875900] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98936f3a-4df6-4506-a051-d33f9397db91 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.884011] env[62740]: DEBUG oslo_vmware.api [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Waiting for the task: (returnval){ [ 734.884011] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]523c864d-b43f-bac3-5d26-4f420ee5336f" [ 734.884011] env[62740]: _type = "Task" [ 734.884011] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.891235] env[62740]: DEBUG oslo_vmware.api [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]523c864d-b43f-bac3-5d26-4f420ee5336f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.944930] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 734.945176] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 734.945363] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Deleting the datastore file [datastore2] 9d175573-2af2-4f66-98cd-411d10f749f0 {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 734.945648] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-40bfb108-855f-46f9-af6e-701915786540 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.951336] env[62740]: DEBUG oslo_vmware.api [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Waiting for the task: (returnval){ [ 734.951336] env[62740]: value = "task-640096" [ 734.951336] env[62740]: _type = "Task" [ 734.951336] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.959045] env[62740]: DEBUG oslo_vmware.api [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Task: {'id': task-640096, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.401558] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 735.403695] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Creating directory with path [datastore2] vmware_temp/07c0d9ff-313d-4bf0-acec-e33e19cb7bea/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 735.403965] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bcaa8da3-9524-44c5-b532-4f4786c95a65 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.419332] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Created directory with path [datastore2] vmware_temp/07c0d9ff-313d-4bf0-acec-e33e19cb7bea/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 735.419617] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Fetch image to [datastore2] vmware_temp/07c0d9ff-313d-4bf0-acec-e33e19cb7bea/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 735.419702] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/07c0d9ff-313d-4bf0-acec-e33e19cb7bea/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 735.420505] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75fd947e-788c-4c22-9c94-2ccf2a2ce5c1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.427581] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ed515c2-c480-43ed-8fce-0977ca2bb15a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.436687] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c95303c6-cf87-458e-9dfd-10ef1352329b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.472298] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92d57166-06e7-4ff7-a422-7c59f57a799f 
{{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.479533] env[62740]: DEBUG oslo_vmware.api [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Task: {'id': task-640096, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073037} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.481054] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 735.481264] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 735.481462] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 735.481628] env[62740]: INFO nova.compute.manager [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Took 0.62 seconds to destroy the instance on the hypervisor. 
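The CopyVirtualDisk_Task / DeleteDatastoreFile_Task entries above follow oslo.vmware's standard pattern: every vCenter mutation returns a managed task reference, and the client polls the task's info until it reports success or error; on error the fault (here InvalidArgument on fileType) is translated into a VimFaultException, exactly as the _poll_task frames in the traceback show. The following is a minimal sketch of that polling loop, assuming a hypothetical fetch_task_info helper; it is illustrative only, not oslo_vmware's actual implementation, which drives the poll through an eventlet-based looping call.

```python
import time

class TaskFailedError(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException."""

def fetch_task_info(session, task_ref):
    """Hypothetical helper: read the task's 'info' property from vCenter.

    In oslo.vmware this is a PropertyCollector read -- the
    RetrievePropertiesEx calls interleaved through the log above.
    """
    raise NotImplementedError("illustrative stub")

def wait_for_task(session, task_ref, poll_interval=0.5):
    """Poll a vCenter task until it finishes.

    Mirrors the "progress is 0%" / "completed successfully" entries.
    """
    while True:
        info = fetch_task_info(session, task_ref)
        if info.state in ('queued', 'running'):
            # oslo.vmware sleeps via an eventlet looping call rather
            # than time.sleep(); this is the simplest equivalent.
            time.sleep(poll_interval)
            continue
        if info.state == 'success':
            return info.result
        # state == 'error': translate the fault, e.g. the
        # InvalidArgument 'fileType' fault that failed task-640094.
        raise TaskFailedError(info.error)
```

In the log, task-640094 (the disk copy) takes the error branch, while task-640096 (the datastore file delete) completes successfully after 0.073s.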
[ 735.483717] env[62740]: DEBUG nova.compute.claims [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 735.483897] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 735.484145] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 735.486169] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Expecting reply to msg 5faf44a92c4d4cdba7be25644925b625 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 735.488282] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-dee7a84e-bb8d-4640-ab94-54ffebbd7e38 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.511018] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 735.523058] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5faf44a92c4d4cdba7be25644925b625 [ 735.562382] env[62740]: DEBUG oslo_vmware.rw_handles [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/07c0d9ff-313d-4bf0-acec-e33e19cb7bea/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 735.627273] env[62740]: DEBUG oslo_vmware.rw_handles [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Completed reading data from the image iterator. 
{{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 735.627273] env[62740]: DEBUG oslo_vmware.rw_handles [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/07c0d9ff-313d-4bf0-acec-e33e19cb7bea/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 735.947377] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d79b9fd-a458-4c7f-b678-2b25fca55692 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.954756] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bec7d0e0-6d43-47ae-b20f-ac994e1927e9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.983946] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e677b55e-fd11-40b3-920c-64e5cdfabc8f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.991402] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ad688b3-097e-4444-8743-86656045dd67 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.004473] env[62740]: DEBUG nova.compute.provider_tree [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 736.004990] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Expecting reply to msg 27549993471f4adca8045f90fcb74887 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 736.013026] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 27549993471f4adca8045f90fcb74887 [ 736.013419] env[62740]: DEBUG nova.scheduler.client.report [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 736.015737] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Expecting reply to msg 
facac4f92cb14a71b6030184c5962460 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 736.028049] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg facac4f92cb14a71b6030184c5962460 [ 736.029057] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.545s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 736.029453] env[62740]: ERROR nova.compute.manager [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 736.029453] env[62740]: Faults: ['InvalidArgument'] [ 736.029453] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Traceback (most recent call last): [ 736.029453] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 736.029453] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] self.driver.spawn(context, instance, image_meta, [ 736.029453] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 736.029453] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 736.029453] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 736.029453] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] self._fetch_image_if_missing(context, vi) [ 736.029453] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 736.029453] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] image_cache(vi, tmp_image_ds_loc) [ 736.029453] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 736.030059] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] vm_util.copy_virtual_disk( [ 736.030059] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 736.030059] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] session._wait_for_task(vmdk_copy_task) [ 736.030059] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 736.030059] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] return self.wait_for_task(task_ref) [ 736.030059] env[62740]: ERROR 
nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 736.030059] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] return evt.wait() [ 736.030059] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 736.030059] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] result = hub.switch() [ 736.030059] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 736.030059] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] return self.greenlet.switch() [ 736.030059] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 736.030059] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] self.f(*self.args, **self.kw) [ 736.030562] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 736.030562] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] raise exceptions.translate_fault(task_info.error) [ 736.030562] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 736.030562] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Faults: ['InvalidArgument'] [ 736.030562] env[62740]: ERROR nova.compute.manager [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] [ 736.030562] env[62740]: DEBUG nova.compute.utils [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 736.032214] env[62740]: DEBUG nova.compute.manager [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Build of instance 9d175573-2af2-4f66-98cd-411d10f749f0 was re-scheduled: A specified parameter was not correct: fileType [ 736.032214] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 736.032606] env[62740]: DEBUG nova.compute.manager [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 736.032783] env[62740]: DEBUG nova.compute.manager [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Virt driver does not provide unplug_vifs 
method, so it is not possible determine if VIFs should be unplugged. {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 736.032956] env[62740]: DEBUG nova.compute.manager [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 736.033138] env[62740]: DEBUG nova.network.neutron [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 736.379375] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Expecting reply to msg e7dd4638b9e64532b25ba5c1fb621eb8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 736.392387] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e7dd4638b9e64532b25ba5c1fb621eb8 [ 736.392387] env[62740]: DEBUG nova.network.neutron [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 736.392387] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Expecting reply to msg df52bffbf380441aa1c0ca0472521559 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 736.409975] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg df52bffbf380441aa1c0ca0472521559 [ 736.409975] env[62740]: INFO nova.compute.manager [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] [instance: 9d175573-2af2-4f66-98cd-411d10f749f0] Took 0.38 seconds to deallocate network for instance. 
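The sequence from "Failed to build and run instance" through the network deallocation above is the compute manager's reschedule path: the spawn failure aborts the resource claim under the compute_resources lock, tears down networking, and hands the build back for another attempt on a (possibly different) host. Below is a compressed, hypothetical sketch of that control flow; stub names such as resource_claim and RescheduledError are stand-ins, not nova's API.

```python
from contextlib import contextmanager

class RescheduledError(Exception):
    """Stand-in for the exception that triggers a retry on another host."""

@contextmanager
def resource_claim(instance):
    # Stand-in for ResourceTracker.instance_claim(); in the log this is
    # the "Lock 'compute_resources' acquired/released" pair.
    print(f"claiming resources for {instance}")
    try:
        yield
    except Exception:
        # Failure path: the "Aborting claim" entry above.
        print(f"aborting claim for {instance}")
        raise

def deallocate_network(instance):
    print(f"deallocating network for {instance}")

def build_and_run(spawn, instance):
    """Sketch of the failure path seen above: spawn raises, the claim
    is aborted, networking is torn down, and the build is handed back
    to the scheduler ("was re-scheduled")."""
    try:
        with resource_claim(instance):
            spawn(instance)
    except Exception as exc:  # e.g. VimFaultException: fileType
        deallocate_network(instance)
        raise RescheduledError(str(exc)) from exc
```

Note that the per-instance build lock released just below ("held 151.546s") spans the entire failed build attempt, including the long image download that preceded the fault.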
[ 736.411721] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Expecting reply to msg 44446295d3da45a9bc5a99c225acfd55 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 736.448734] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 44446295d3da45a9bc5a99c225acfd55 [ 736.452009] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Expecting reply to msg 11c4a8b9bac04c14927a863ad1247b8f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 736.481329] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 11c4a8b9bac04c14927a863ad1247b8f [ 736.501254] env[62740]: INFO nova.scheduler.client.report [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Deleted allocations for instance 9d175573-2af2-4f66-98cd-411d10f749f0 [ 736.508403] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Expecting reply to msg 38bca1975e014faeacc8eabe61a41dee in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 736.523502] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 38bca1975e014faeacc8eabe61a41dee [ 736.524201] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c627bd83-dd90-46d0-855d-1590964ad72d tempest-ServerActionsTestOtherB-1591768249 tempest-ServerActionsTestOtherB-1591768249-project-member] Lock "9d175573-2af2-4f66-98cd-411d10f749f0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 151.546s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 736.524809] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Expecting reply to msg 64d3207792e94901970725a40f2255b6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 736.549693] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 64d3207792e94901970725a40f2255b6 [ 736.550316] env[62740]: DEBUG nova.compute.manager [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Starting instance... 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 736.552831] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Expecting reply to msg 13cc459b7ea846ab8dd232e6f32c7637 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 736.586474] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 13cc459b7ea846ab8dd232e6f32c7637 [ 736.601545] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 736.601817] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 736.603653] env[62740]: INFO nova.compute.claims [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 736.605270] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Expecting reply to msg 05a5dd1f78de404898bbdb05214ab44d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 736.636413] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 05a5dd1f78de404898bbdb05214ab44d [ 736.638133] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Expecting reply to msg efff3d6286754f7b8c011861dd4d9f44 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 736.647139] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg efff3d6286754f7b8c011861dd4d9f44 [ 736.992192] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d265349-94de-4ecf-a982-030ff62535e0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.999633] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf929342-e92a-4103-b679-5633cac38a18 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.030037] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cc0ebc3-d912-4311-a720-7da3b4b20c9a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.037292] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-9211c0c9-feb1-48e9-823a-134b8c50f8c8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.051749] env[62740]: DEBUG nova.compute.provider_tree [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 737.052308] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Expecting reply to msg 8d31d57ea2e147d28f9da87704ab7a5e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 737.061179] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8d31d57ea2e147d28f9da87704ab7a5e [ 737.063021] env[62740]: DEBUG nova.scheduler.client.report [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 737.064396] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Expecting reply to msg b288d3e6135849cda3d92d40022aa3fa in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 737.092944] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b288d3e6135849cda3d92d40022aa3fa [ 737.093124] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.491s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 737.096851] env[62740]: DEBUG nova.compute.manager [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Start building networks asynchronously for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 737.096851] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Expecting reply to msg 13d4144713174eca837478146019a1d8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 737.128112] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 13d4144713174eca837478146019a1d8 [ 737.130127] env[62740]: DEBUG nova.compute.utils [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 737.130822] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Expecting reply to msg 9e748639434442f2ae7ce3a569c04f57 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 737.131787] env[62740]: DEBUG nova.compute.manager [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 737.131984] env[62740]: DEBUG nova.network.neutron [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 737.161946] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9e748639434442f2ae7ce3a569c04f57 [ 737.162701] env[62740]: DEBUG nova.compute.manager [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 737.164502] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Expecting reply to msg 7a5f9c9e25f14c1a8f15c8450a282c4f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 737.199468] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7a5f9c9e25f14c1a8f15c8450a282c4f [ 737.201884] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Expecting reply to msg aeea123f7b194166b91977df9bba20e7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 737.229775] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aeea123f7b194166b91977df9bba20e7 [ 737.231033] env[62740]: DEBUG nova.compute.manager [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Start spawning the instance on the hypervisor. {{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 737.257666] env[62740]: DEBUG nova.virt.hardware [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 737.257919] env[62740]: DEBUG nova.virt.hardware [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 737.258089] env[62740]: DEBUG nova.virt.hardware [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 737.258362] env[62740]: DEBUG nova.virt.hardware [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 737.258523] env[62740]: DEBUG nova.virt.hardware [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 
tempest-ServerAddressesTestJSON-2119483075-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 737.258673] env[62740]: DEBUG nova.virt.hardware [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 737.258887] env[62740]: DEBUG nova.virt.hardware [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 737.259137] env[62740]: DEBUG nova.virt.hardware [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 737.259421] env[62740]: DEBUG nova.virt.hardware [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 737.259600] env[62740]: DEBUG nova.virt.hardware [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 737.259779] env[62740]: DEBUG nova.virt.hardware [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 737.260675] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-326a95db-d7b9-4e39-828e-e48252d43517 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.269441] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79401ff0-4915-416c-a60e-7e056d810671 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.419757] env[62740]: DEBUG nova.policy [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a52ea2bde3c941d6b7eb2cea96d467c9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd3c8f3e88ed842d19db983059ac3ceb5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) 
authorize /opt/stack/nova/nova/policy.py:203}} [ 738.037822] env[62740]: DEBUG nova.network.neutron [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Successfully created port: 3ea0caa1-a31f-4c85-af1a-3fc66963f536 {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 739.705960] env[62740]: DEBUG nova.network.neutron [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Successfully updated port: 3ea0caa1-a31f-4c85-af1a-3fc66963f536 {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 739.706223] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Expecting reply to msg e4caef0303494a5f942c1112264cfba7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 739.718417] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e4caef0303494a5f942c1112264cfba7 [ 739.718944] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Acquiring lock "refresh_cache-f98589dc-ea7a-44c8-8cca-119d126ea0de" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 739.719100] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Acquired lock "refresh_cache-f98589dc-ea7a-44c8-8cca-119d126ea0de" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.719288] env[62740]: DEBUG nova.network.neutron [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 739.719700] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Expecting reply to msg d31a05535fc04db3afac46f6824f4b38 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 739.733933] env[62740]: DEBUG nova.compute.manager [req-c83500ba-50b1-497c-8cee-11fed552c575 req-cae1f611-30c6-4e43-8844-0557bfcea8d0 service nova] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Received event network-vif-plugged-3ea0caa1-a31f-4c85-af1a-3fc66963f536 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 739.734165] env[62740]: DEBUG oslo_concurrency.lockutils [req-c83500ba-50b1-497c-8cee-11fed552c575 req-cae1f611-30c6-4e43-8844-0557bfcea8d0 service nova] Acquiring lock "f98589dc-ea7a-44c8-8cca-119d126ea0de-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 739.734396] env[62740]: DEBUG oslo_concurrency.lockutils 
[req-c83500ba-50b1-497c-8cee-11fed552c575 req-cae1f611-30c6-4e43-8844-0557bfcea8d0 service nova] Lock "f98589dc-ea7a-44c8-8cca-119d126ea0de-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 739.734580] env[62740]: DEBUG oslo_concurrency.lockutils [req-c83500ba-50b1-497c-8cee-11fed552c575 req-cae1f611-30c6-4e43-8844-0557bfcea8d0 service nova] Lock "f98589dc-ea7a-44c8-8cca-119d126ea0de-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 739.734752] env[62740]: DEBUG nova.compute.manager [req-c83500ba-50b1-497c-8cee-11fed552c575 req-cae1f611-30c6-4e43-8844-0557bfcea8d0 service nova] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] No waiting events found dispatching network-vif-plugged-3ea0caa1-a31f-4c85-af1a-3fc66963f536 {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 739.734952] env[62740]: WARNING nova.compute.manager [req-c83500ba-50b1-497c-8cee-11fed552c575 req-cae1f611-30c6-4e43-8844-0557bfcea8d0 service nova] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Received unexpected event network-vif-plugged-3ea0caa1-a31f-4c85-af1a-3fc66963f536 for instance with vm_state building and task_state spawning. [ 739.735841] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d31a05535fc04db3afac46f6824f4b38 [ 739.794914] env[62740]: DEBUG nova.network.neutron [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 740.040759] env[62740]: DEBUG nova.network.neutron [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Updating instance_info_cache with network_info: [{"id": "3ea0caa1-a31f-4c85-af1a-3fc66963f536", "address": "fa:16:3e:ef:d8:f6", "network": {"id": "4d346837-422d-4792-bb68-a3bda0772b20", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-254417240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3c8f3e88ed842d19db983059ac3ceb5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ea0caa1-a3", "ovs_interfaceid": "3ea0caa1-a31f-4c85-af1a-3fc66963f536", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.040759] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Expecting reply to msg 3823f535d6b449d6865cc3cca56c0fba in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 740.065548] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3823f535d6b449d6865cc3cca56c0fba [ 740.069016] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Releasing lock "refresh_cache-f98589dc-ea7a-44c8-8cca-119d126ea0de" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 740.069016] env[62740]: DEBUG nova.compute.manager [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Instance network_info: |[{"id": "3ea0caa1-a31f-4c85-af1a-3fc66963f536", "address": "fa:16:3e:ef:d8:f6", "network": {"id": "4d346837-422d-4792-bb68-a3bda0772b20", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-254417240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3c8f3e88ed842d19db983059ac3ceb5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ea0caa1-a3", "ovs_interfaceid": "3ea0caa1-a31f-4c85-af1a-3fc66963f536", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 740.069284] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ef:d8:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '48937bd1-23dc-413f-b46b-59bf9e709aa0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3ea0caa1-a31f-4c85-af1a-3fc66963f536', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 740.076086] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Creating folder: Project (d3c8f3e88ed842d19db983059ac3ceb5). Parent ref: group-v156037. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 740.076823] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-649665d0-498d-4d8e-a290-b4a84819caa8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.091018] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Created folder: Project (d3c8f3e88ed842d19db983059ac3ceb5) in parent group-v156037. [ 740.091018] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Creating folder: Instances. Parent ref: group-v156081. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 740.091018] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6c33cc3f-4262-4e1e-aac8-cc85e706becd {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.098406] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Created folder: Instances in parent group-v156081. [ 740.098831] env[62740]: DEBUG oslo.service.loopingcall [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 740.099157] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 740.099485] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a3b0dbc7-ce28-49c2-bc96-2c9d14e7c953 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.125764] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 740.125764] env[62740]: value = "task-640099" [ 740.125764] env[62740]: _type = "Task" [ 740.125764] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.137987] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640099, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.634693] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640099, 'name': CreateVM_Task, 'duration_secs': 0.305973} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.634883] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 740.635656] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 740.635821] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Acquired lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.636325] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 740.636600] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e1a555d-ea43-4a8d-9684-fe8096a3ca8c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.641403] env[62740]: DEBUG oslo_vmware.api [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Waiting for the task: (returnval){ [ 740.641403] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]524387a6-ee9d-eee5-cc16-84b9ab35b968" [ 740.641403] env[62740]: _type = "Task" [ 740.641403] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.649044] env[62740]: DEBUG oslo_vmware.api [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]524387a6-ee9d-eee5-cc16-84b9ab35b968, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.154718] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Releasing lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 741.155141] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 741.155296] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 741.807131] env[62740]: DEBUG nova.compute.manager [req-24b42019-8d91-480c-a87d-c5402ac05ef9 req-f48d1d73-1433-4af6-87ed-f511bd4e5eaf service nova] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Received event network-changed-3ea0caa1-a31f-4c85-af1a-3fc66963f536 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 741.807325] env[62740]: DEBUG nova.compute.manager [req-24b42019-8d91-480c-a87d-c5402ac05ef9 req-f48d1d73-1433-4af6-87ed-f511bd4e5eaf service nova] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Refreshing instance network info cache due to event network-changed-3ea0caa1-a31f-4c85-af1a-3fc66963f536. 
{{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 741.807538] env[62740]: DEBUG oslo_concurrency.lockutils [req-24b42019-8d91-480c-a87d-c5402ac05ef9 req-f48d1d73-1433-4af6-87ed-f511bd4e5eaf service nova] Acquiring lock "refresh_cache-f98589dc-ea7a-44c8-8cca-119d126ea0de" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 741.807680] env[62740]: DEBUG oslo_concurrency.lockutils [req-24b42019-8d91-480c-a87d-c5402ac05ef9 req-f48d1d73-1433-4af6-87ed-f511bd4e5eaf service nova] Acquired lock "refresh_cache-f98589dc-ea7a-44c8-8cca-119d126ea0de" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.807841] env[62740]: DEBUG nova.network.neutron [req-24b42019-8d91-480c-a87d-c5402ac05ef9 req-f48d1d73-1433-4af6-87ed-f511bd4e5eaf service nova] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Refreshing network info cache for port 3ea0caa1-a31f-4c85-af1a-3fc66963f536 {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 741.808476] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-24b42019-8d91-480c-a87d-c5402ac05ef9 req-f48d1d73-1433-4af6-87ed-f511bd4e5eaf service nova] Expecting reply to msg 85dffd8008734730b2d09f356aca15b6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 741.817112] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 85dffd8008734730b2d09f356aca15b6 [ 742.342526] env[62740]: DEBUG nova.network.neutron [req-24b42019-8d91-480c-a87d-c5402ac05ef9 req-f48d1d73-1433-4af6-87ed-f511bd4e5eaf service nova] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Updated VIF entry in instance network info cache for port 3ea0caa1-a31f-4c85-af1a-3fc66963f536. 
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 742.343324] env[62740]: DEBUG nova.network.neutron [req-24b42019-8d91-480c-a87d-c5402ac05ef9 req-f48d1d73-1433-4af6-87ed-f511bd4e5eaf service nova] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Updating instance_info_cache with network_info: [{"id": "3ea0caa1-a31f-4c85-af1a-3fc66963f536", "address": "fa:16:3e:ef:d8:f6", "network": {"id": "4d346837-422d-4792-bb68-a3bda0772b20", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-254417240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3c8f3e88ed842d19db983059ac3ceb5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ea0caa1-a3", "ovs_interfaceid": "3ea0caa1-a31f-4c85-af1a-3fc66963f536", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.343874] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-24b42019-8d91-480c-a87d-c5402ac05ef9 req-f48d1d73-1433-4af6-87ed-f511bd4e5eaf service nova] Expecting reply to msg 15f49dc54a564584ae08b785a6045231 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 742.354295] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 15f49dc54a564584ae08b785a6045231 [ 742.355378] env[62740]: DEBUG oslo_concurrency.lockutils [req-24b42019-8d91-480c-a87d-c5402ac05ef9 req-f48d1d73-1433-4af6-87ed-f511bd4e5eaf service nova] Releasing lock "refresh_cache-f98589dc-ea7a-44c8-8cca-119d126ea0de" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 745.975910] env[62740]: DEBUG oslo_concurrency.lockutils [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Acquiring lock "f22357ec-450c-4545-8822-74b83bfc5a35" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.977181] env[62740]: DEBUG oslo_concurrency.lockutils [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Lock "f22357ec-450c-4545-8822-74b83bfc5a35" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 746.311146] env[62740]: WARNING oslo_vmware.rw_handles [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Error occurred while reading the HTTP response.: 
http.client.RemoteDisconnected: Remote end closed connection without response [ 746.311146] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 746.311146] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 746.311146] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 746.311146] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 746.311146] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 746.311146] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 746.311146] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 746.311146] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 746.311146] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 746.311146] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 746.311146] env[62740]: ERROR oslo_vmware.rw_handles [ 746.311676] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/93aa040f-c4a6-426a-83e9-98d183992c0b/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore1 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 746.313398] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 746.313643] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Copying Virtual Disk [datastore1] vmware_temp/93aa040f-c4a6-426a-83e9-98d183992c0b/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore1] vmware_temp/93aa040f-c4a6-426a-83e9-98d183992c0b/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 746.313937] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5ddfc098-9e42-4a13-aaa7-4be090b35473 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.323124] env[62740]: DEBUG oslo_vmware.api [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Waiting for the task: (returnval){ [ 746.323124] env[62740]: value = "task-640100" [ 746.323124] env[62740]: _type = "Task" [ 746.323124] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.331206] env[62740]: DEBUG oslo_vmware.api [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Task: {'id': task-640100, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.833336] env[62740]: DEBUG oslo_vmware.exceptions [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Fault InvalidArgument not matched. {{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 746.833639] env[62740]: DEBUG oslo_concurrency.lockutils [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Releasing lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 746.834244] env[62740]: ERROR nova.compute.manager [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 746.834244] env[62740]: Faults: ['InvalidArgument'] [ 746.834244] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Traceback (most recent call last): [ 746.834244] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 746.834244] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] yield resources [ 746.834244] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 746.834244] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] self.driver.spawn(context, instance, image_meta, [ 746.834244] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 746.834244] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 746.834244] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 746.834244] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] self._fetch_image_if_missing(context, vi) [ 746.834244] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 746.834618] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] image_cache(vi, tmp_image_ds_loc) [ 746.834618] env[62740]: ERROR nova.compute.manager [instance: 
149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 746.834618] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] vm_util.copy_virtual_disk( [ 746.834618] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 746.834618] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] session._wait_for_task(vmdk_copy_task) [ 746.834618] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 746.834618] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] return self.wait_for_task(task_ref) [ 746.834618] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 746.834618] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] return evt.wait() [ 746.834618] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 746.834618] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] result = hub.switch() [ 746.834618] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 746.834618] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] return self.greenlet.switch() [ 746.835015] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 746.835015] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] self.f(*self.args, **self.kw) [ 746.835015] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 746.835015] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] raise exceptions.translate_fault(task_info.error) [ 746.835015] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 746.835015] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Faults: ['InvalidArgument'] [ 746.835015] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] [ 746.835015] env[62740]: INFO nova.compute.manager [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Terminating instance [ 746.836350] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.836575] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 746.837523] env[62740]: DEBUG nova.compute.manager [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 746.837865] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 746.837944] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-247458fc-45bf-42e3-ab0c-e57fdf14593d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.840373] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6f8daaf-d642-4f10-825b-4ac2d6a66777 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.847678] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 746.848715] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-99ffbd90-5572-4606-93a2-7b9f78ea7f8b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.850195] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 746.850374] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 746.851075] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3de9765d-6b92-406c-a658-a813f152b928 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.855738] env[62740]: DEBUG oslo_vmware.api [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Waiting for the task: (returnval){ [ 746.855738] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5236072b-7dc6-8196-f27e-9cd52f984484" [ 746.855738] env[62740]: _type = "Task" [ 746.855738] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.864061] env[62740]: DEBUG oslo_vmware.api [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5236072b-7dc6-8196-f27e-9cd52f984484, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.916242] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 746.916548] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Deleting contents of the VM from datastore datastore1 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 746.916814] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Deleting the datastore file [datastore1] 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 746.917189] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ede574f8-1676-40eb-a117-0b2080a71358 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.924674] env[62740]: DEBUG oslo_vmware.api [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Waiting for the task: (returnval){ [ 746.924674] env[62740]: value = "task-640102" [ 746.924674] env[62740]: _type = "Task" [ 746.924674] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.935807] env[62740]: DEBUG oslo_vmware.api [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Task: {'id': task-640102, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.366798] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 747.367127] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Creating directory with path [datastore1] vmware_temp/9d4b10be-0c58-442c-9399-db90f722e4d3/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 747.367164] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3687de98-cf86-45a6-a550-3c3400123969 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.378558] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Created directory with path [datastore1] vmware_temp/9d4b10be-0c58-442c-9399-db90f722e4d3/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 747.378762] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Fetch image to [datastore1] vmware_temp/9d4b10be-0c58-442c-9399-db90f722e4d3/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 747.379129] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore1] vmware_temp/9d4b10be-0c58-442c-9399-db90f722e4d3/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore1 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 747.379734] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-912f2f0a-1b22-4a7d-b8bc-724c77ae015a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.386541] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fe2ce26-93f4-465b-931d-8d87ff8e61ac {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.395469] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-598833cf-c493-4291-b264-8c301fdca5cb {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.428880] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-92e78d3c-474e-4e54-82ba-0ff9069f3183 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.435847] env[62740]: DEBUG oslo_vmware.api [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Task: {'id': task-640102, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.070305} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.437321] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 747.437541] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Deleted contents of the VM from datastore datastore1 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 747.437723] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 747.437900] env[62740]: INFO nova.compute.manager [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Took 0.60 seconds to destroy the instance on the hypervisor. 
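The vCenter task waits recorded above — CreateVM_Task (task-640099), CopyVirtualDisk_Task (task-640100) and DeleteDatastoreFile_Task (task-640102) — all follow the same shape: poll the task object, report progress while it is running, return on success, and raise a translated fault on error. The following is a minimal Python sketch of such a poll loop, assuming a get_task_info callable that returns a vSphere-style task-info dict; the names and the plain time.sleep loop are illustrative stand-ins, not oslo.vmware's actual API, which drives the poll from a looping call instead.

import time

class TaskFaultError(Exception):
    """Illustrative stand-in for a translated vCenter task fault."""

def wait_for_task(get_task_info, interval=0.5):
    # Poll until the task reaches a terminal state, mirroring the
    # "progress is 0%" / "completed successfully" records above.
    while True:
        info = get_task_info()
        if info["state"] == "running":
            print(f"Task: {info['id']} progress is {info.get('progress', 0)}%.")
        elif info["state"] == "success":
            print(f"Task: {info['id']} completed successfully.")
            return info
        elif info["state"] == "error":
            # oslo.vmware raises a fault translated from task_info.error here.
            raise TaskFaultError(info.get("error", "unknown fault"))
        time.sleep(interval)

# Simulate task-640099 finishing on the second poll:
polls = iter([
    {"id": "task-640099", "state": "running", "progress": 0},
    {"id": "task-640099", "state": "success"},
])
wait_for_task(lambda: next(polls), interval=0.01)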
[ 747.439964] env[62740]: DEBUG nova.compute.claims [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 747.440165] env[62740]: DEBUG oslo_concurrency.lockutils [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 747.440412] env[62740]: DEBUG oslo_concurrency.lockutils [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 747.442291] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg f1a384e8302f4ad194ce080e45c4fac3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 747.443257] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f5c87815-1734-4a00-b413-cf357fbcc5f9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.467084] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore1 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 747.484397] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f1a384e8302f4ad194ce080e45c4fac3 [ 747.536957] env[62740]: DEBUG oslo_vmware.rw_handles [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9d4b10be-0c58-442c-9399-db90f722e4d3/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 747.597656] env[62740]: DEBUG oslo_vmware.rw_handles [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Completed reading data from the image iterator. 
{{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 747.597656] env[62740]: DEBUG oslo_vmware.rw_handles [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9d4b10be-0c58-442c-9399-db90f722e4d3/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 747.926198] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2c01957-01a3-4d2f-9c67-5fc72254f1d8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.933621] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64207ec0-785d-4ef5-8c23-54e0ff53ae98 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.963514] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-875c9f27-eae1-4c00-b8a9-713f8dcb9e26 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.970759] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab3ce215-bfe6-4185-82b8-d503a69ca213 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.983866] env[62740]: DEBUG nova.compute.provider_tree [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 747.984380] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg 1290676ff84e4039bb415910ed1d3e79 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 747.999678] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1290676ff84e4039bb415910ed1d3e79 [ 748.000673] env[62740]: DEBUG nova.scheduler.client.report [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 748.006184] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting 
reply to msg 87c830fbe6c14e28916306f47c197a2f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 748.016435] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 87c830fbe6c14e28916306f47c197a2f [ 748.017303] env[62740]: DEBUG oslo_concurrency.lockutils [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.577s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.017827] env[62740]: ERROR nova.compute.manager [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 748.017827] env[62740]: Faults: ['InvalidArgument'] [ 748.017827] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Traceback (most recent call last): [ 748.017827] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 748.017827] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] self.driver.spawn(context, instance, image_meta, [ 748.017827] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 748.017827] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 748.017827] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 748.017827] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] self._fetch_image_if_missing(context, vi) [ 748.017827] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 748.017827] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] image_cache(vi, tmp_image_ds_loc) [ 748.017827] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 748.018140] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] vm_util.copy_virtual_disk( [ 748.018140] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 748.018140] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] session._wait_for_task(vmdk_copy_task) [ 748.018140] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 748.018140] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] return self.wait_for_task(task_ref) [ 748.018140] 
env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 748.018140] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] return evt.wait() [ 748.018140] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 748.018140] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] result = hub.switch() [ 748.018140] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 748.018140] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] return self.greenlet.switch() [ 748.018140] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 748.018140] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] self.f(*self.args, **self.kw) [ 748.018463] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 748.018463] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] raise exceptions.translate_fault(task_info.error) [ 748.018463] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 748.018463] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Faults: ['InvalidArgument'] [ 748.018463] env[62740]: ERROR nova.compute.manager [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] [ 748.018584] env[62740]: DEBUG nova.compute.utils [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 748.020137] env[62740]: DEBUG nova.compute.manager [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Build of instance 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd was re-scheduled: A specified parameter was not correct: fileType [ 748.020137] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 748.020424] env[62740]: DEBUG nova.compute.manager [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 748.020605] env[62740]: DEBUG nova.compute.manager [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Virt driver does 
not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 748.020765] env[62740]: DEBUG nova.compute.manager [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 748.020931] env[62740]: DEBUG nova.network.neutron [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 748.474194] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg 852a1f72cde7458d9dc840371c6fb4ad in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 748.483195] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 852a1f72cde7458d9dc840371c6fb4ad [ 748.486607] env[62740]: DEBUG nova.network.neutron [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.486607] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg 65a53192b75d44b4be333e6584376c44 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 748.504510] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 65a53192b75d44b4be333e6584376c44 [ 748.505198] env[62740]: INFO nova.compute.manager [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd] Took 0.48 seconds to deallocate network for instance. 
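Note on the traceback above: it is the canonical failure path for a VMware task. _cache_sparse_image calls vm_util.copy_virtual_disk, which submits a copy task and blocks in session._wait_for_task; oslo.vmware polls the task and, once its state comes back as error, translates the VIM fault (here 'InvalidArgument' on fileType) into a VimFaultException, which unwinds through spawn() into _build_and_run_instance, aborts the resource claim, and triggers the reschedule logged next. A minimal poll-and-translate sketch, assuming a caller-supplied get_task_info callable in place of the real session/property-collector plumbing (names and dict shapes are illustrative, not oslo.vmware's actual implementation):

    import time

    class VimFaultException(Exception):
        # Mirrors the shape of oslo_vmware.exceptions.VimFaultException:
        # a message plus the list of fault names, e.g. ['InvalidArgument'].
        def __init__(self, fault_list, message):
            super().__init__("%s Faults: %r" % (message, fault_list))
            self.fault_list = fault_list

    def wait_for_task(get_task_info, interval=0.5):
        # Poll the task until it leaves the 'queued'/'running' states.
        while True:
            info = get_task_info()  # stand-in for reading TaskInfo via the session
            if info["state"] == "success":
                return info
            if info["state"] == "error":
                # oslo.vmware's translate_fault() turns the VIM fault into an
                # exception; this is where "A specified parameter was not
                # correct: fileType" surfaces to Nova.
                raise VimFaultException(info["faults"], info["message"])
            time.sleep(interval)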
[ 748.506859] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg 1496c3f51a124c25b415b1a52da59bb0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 748.537257] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1496c3f51a124c25b415b1a52da59bb0 [ 748.540069] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg 1f43f8e2aaed4f00935cd1850125f046 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 748.571798] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1f43f8e2aaed4f00935cd1850125f046 [ 748.595974] env[62740]: INFO nova.scheduler.client.report [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Deleted allocations for instance 149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd [ 748.601837] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg 3ef415e805904321957793298440d8ae in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 748.632854] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ef415e805904321957793298440d8ae [ 748.633597] env[62740]: DEBUG oslo_concurrency.lockutils [None req-57b4a7af-4b0f-40df-9f8e-997629e42b92 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Lock "149ea1a1-2c8e-4c3e-9094-6bb78e5c89fd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 136.711s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.634342] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg 71622be640194e2488450cc4f796870e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 748.647672] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 71622be640194e2488450cc4f796870e [ 748.648164] env[62740]: DEBUG nova.compute.manager [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Starting instance... 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 748.650277] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg bb047b88cb47446e98809e6439a77671 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 748.681750] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bb047b88cb47446e98809e6439a77671 [ 748.698082] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.698372] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.703015] env[62740]: INFO nova.compute.claims [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 748.703015] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg 79af1a8e591f46a3bb5867170d955f00 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 748.734590] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79af1a8e591f46a3bb5867170d955f00 [ 748.736478] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg 234cba7fe524432e9f8a142c66e8146e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 748.744107] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 234cba7fe524432e9f8a142c66e8146e [ 749.111428] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9976441e-f1e1-486b-89e2-9980084f0069 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.119306] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61186385-8a20-4b4b-8ef4-db0d019e7449 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.151675] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7cc06d2-2105-49cd-8131-25e892326104 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.159909] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-61c96b45-3701-4160-8185-67958cca9d05 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.173727] env[62740]: DEBUG nova.compute.provider_tree [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 749.174192] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg 3013e189a8084de1b03f21e1cea4e197 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 749.183650] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3013e189a8084de1b03f21e1cea4e197 [ 749.184600] env[62740]: DEBUG nova.scheduler.client.report [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 749.187016] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg cbe6a943547c4388a2647d8cf1f508ed in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 749.201171] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cbe6a943547c4388a2647d8cf1f508ed [ 749.201908] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.504s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 749.202390] env[62740]: DEBUG nova.compute.manager [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Start building networks asynchronously for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 749.204025] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg 54b3d12355804ffba6e7ddd5997c1f0b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 749.236034] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 54b3d12355804ffba6e7ddd5997c1f0b [ 749.236865] env[62740]: DEBUG nova.compute.utils [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 749.237486] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg 107c1a7675794534aafc5f787e30666e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 749.242023] env[62740]: DEBUG nova.compute.manager [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 749.242023] env[62740]: DEBUG nova.network.neutron [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 749.246894] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 107c1a7675794534aafc5f787e30666e [ 749.248235] env[62740]: DEBUG nova.compute.manager [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 749.249124] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg a21bfe3d022b4d699291b56295f2cbc8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 749.279587] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a21bfe3d022b4d699291b56295f2cbc8 [ 749.282510] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg a59ed781346e455ab824e361bc767044 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 749.313189] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a59ed781346e455ab824e361bc767044 [ 749.313957] env[62740]: DEBUG nova.compute.manager [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Start spawning the instance on the hypervisor. {{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 749.327879] env[62740]: DEBUG nova.policy [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '865d4b967faf4e28bcb9eca45175e5d8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f91cddc4ce184025b45b526c8bc56a5b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 749.344791] env[62740]: DEBUG nova.virt.hardware [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 749.345256] env[62740]: DEBUG nova.virt.hardware [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 749.346026] env[62740]: DEBUG 
nova.virt.hardware [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 749.346026] env[62740]: DEBUG nova.virt.hardware [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 749.346026] env[62740]: DEBUG nova.virt.hardware [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 749.346261] env[62740]: DEBUG nova.virt.hardware [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 749.350017] env[62740]: DEBUG nova.virt.hardware [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 749.350017] env[62740]: DEBUG nova.virt.hardware [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 749.350017] env[62740]: DEBUG nova.virt.hardware [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 749.350017] env[62740]: DEBUG nova.virt.hardware [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 749.350017] env[62740]: DEBUG nova.virt.hardware [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 749.350210] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89f49377-fe0b-4491-abdd-81197bc03edf {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.357057] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-507dbdc7-3301-4f3e-9a6b-65f9dbd287ae {{(pid=62740) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.765641] env[62740]: DEBUG nova.network.neutron [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Successfully created port: 277b87d8-d51d-40e3-90fa-28868725759f {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 750.750019] env[62740]: DEBUG nova.network.neutron [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Successfully updated port: 277b87d8-d51d-40e3-90fa-28868725759f {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 750.750858] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg 806eafdb14cd4ba4954f90da3c95ef26 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 750.761260] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 806eafdb14cd4ba4954f90da3c95ef26 [ 750.762009] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Acquiring lock "refresh_cache-fa5248d1-bddf-4244-a363-2113b0473980" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 750.762158] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Acquired lock "refresh_cache-fa5248d1-bddf-4244-a363-2113b0473980" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.762311] env[62740]: DEBUG nova.network.neutron [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 750.762698] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg 9524c84c84b94d2dac1da4e259c427a3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 750.772730] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9524c84c84b94d2dac1da4e259c427a3 [ 750.809883] env[62740]: DEBUG nova.network.neutron [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 750.866421] env[62740]: DEBUG nova.compute.manager [req-afc46315-d2e3-4a58-8174-5600a9713a1b req-74385325-6ffa-4853-8577-1dee7646a9d2 service nova] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Received event network-vif-plugged-277b87d8-d51d-40e3-90fa-28868725759f {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 750.866659] env[62740]: DEBUG oslo_concurrency.lockutils [req-afc46315-d2e3-4a58-8174-5600a9713a1b req-74385325-6ffa-4853-8577-1dee7646a9d2 service nova] Acquiring lock "fa5248d1-bddf-4244-a363-2113b0473980-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 750.866848] env[62740]: DEBUG oslo_concurrency.lockutils [req-afc46315-d2e3-4a58-8174-5600a9713a1b req-74385325-6ffa-4853-8577-1dee7646a9d2 service nova] Lock "fa5248d1-bddf-4244-a363-2113b0473980-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 750.866987] env[62740]: DEBUG oslo_concurrency.lockutils [req-afc46315-d2e3-4a58-8174-5600a9713a1b req-74385325-6ffa-4853-8577-1dee7646a9d2 service nova] Lock "fa5248d1-bddf-4244-a363-2113b0473980-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 750.867166] env[62740]: DEBUG nova.compute.manager [req-afc46315-d2e3-4a58-8174-5600a9713a1b req-74385325-6ffa-4853-8577-1dee7646a9d2 service nova] [instance: fa5248d1-bddf-4244-a363-2113b0473980] No waiting events found dispatching network-vif-plugged-277b87d8-d51d-40e3-90fa-28868725759f {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 750.867352] env[62740]: WARNING nova.compute.manager [req-afc46315-d2e3-4a58-8174-5600a9713a1b req-74385325-6ffa-4853-8577-1dee7646a9d2 service nova] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Received unexpected event network-vif-plugged-277b87d8-d51d-40e3-90fa-28868725759f for instance with vm_state building and task_state spawning. 
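Note on the "No waiting events found" / "Received unexpected event" pair above: this is the usual benign race during spawn. Neutron delivered network-vif-plugged-277b87d8-d51d-40e3-90fa-28868725759f before the compute thread registered a waiter for it. Nova keeps a per-instance map of pending events and pops the matching waiter under the "<uuid>-events" lock seen in the log; if nothing is waiting yet, the event is logged as unexpected and dropped. A simplified sketch of that pop-or-warn pattern (threading.Event stands in for the eventlet primitives Nova actually uses; names are illustrative):

    import threading

    class InstanceEvents:
        # Sketch of the prepare/pop pair visible in the log above,
        # loosely after nova.compute.manager.InstanceEvents.
        def __init__(self):
            self._events = {}          # {instance_uuid: {event_name: Event}}
            self._lock = threading.Lock()

        def prepare_for_event(self, instance_uuid, event_name):
            with self._lock:           # cf. the "...-events" lock lines above
                ev = threading.Event()
                self._events.setdefault(instance_uuid, {})[event_name] = ev
                return ev

        def pop_instance_event(self, instance_uuid, event_name):
            with self._lock:
                return self._events.get(instance_uuid, {}).pop(event_name, None)

    def external_instance_event(events, instance_uuid, event_name):
        waiter = events.pop_instance_event(instance_uuid, event_name)
        if waiter is None:
            # Matches the WARNING above: the event arrived before anyone waited.
            print("Received unexpected event %s for instance %s"
                  % (event_name, instance_uuid))
        else:
            waiter.set()               # unblock the thread plugging the VIF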
[ 751.056481] env[62740]: DEBUG nova.network.neutron [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Updating instance_info_cache with network_info: [{"id": "277b87d8-d51d-40e3-90fa-28868725759f", "address": "fa:16:3e:56:17:06", "network": {"id": "304597cd-4bd9-403c-8f5b-990e2a1efabc", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.195", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "47f0062f3bf04910bbbb3502a2f3ff28", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap277b87d8-d5", "ovs_interfaceid": "277b87d8-d51d-40e3-90fa-28868725759f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.057010] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg f73e3edffe6746498ebf797248f34290 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 751.067342] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f73e3edffe6746498ebf797248f34290 [ 751.067930] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Releasing lock "refresh_cache-fa5248d1-bddf-4244-a363-2113b0473980" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 751.068270] env[62740]: DEBUG nova.compute.manager [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Instance network_info: |[{"id": "277b87d8-d51d-40e3-90fa-28868725759f", "address": "fa:16:3e:56:17:06", "network": {"id": "304597cd-4bd9-403c-8f5b-990e2a1efabc", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.195", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "47f0062f3bf04910bbbb3502a2f3ff28", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap277b87d8-d5", "ovs_interfaceid": "277b87d8-d51d-40e3-90fa-28868725759f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 751.068667] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:56:17:06', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '39ab9baf-90cd-4fe2-8d56-434f8210fc19', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '277b87d8-d51d-40e3-90fa-28868725759f', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 751.076144] env[62740]: DEBUG oslo.service.loopingcall [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 751.076605] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 751.076832] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3f8832b6-49dd-4a1f-b86e-e892167d72f3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.097033] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 751.097033] env[62740]: value = "task-640103" [ 751.097033] env[62740]: _type = "Task" [ 751.097033] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.104647] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640103, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.607377] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640103, 'name': CreateVM_Task} progress is 25%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.107718] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640103, 'name': CreateVM_Task} progress is 25%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.608687] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640103, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.956048] env[62740]: DEBUG nova.compute.manager [req-825ec728-8383-4a76-9f5f-37be965380fc req-7bd3e2b8-d912-4336-9d4d-22421cc13144 service nova] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Received event network-changed-277b87d8-d51d-40e3-90fa-28868725759f {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 752.956239] env[62740]: DEBUG nova.compute.manager [req-825ec728-8383-4a76-9f5f-37be965380fc req-7bd3e2b8-d912-4336-9d4d-22421cc13144 service nova] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Refreshing instance network info cache due to event network-changed-277b87d8-d51d-40e3-90fa-28868725759f. {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 752.956463] env[62740]: DEBUG oslo_concurrency.lockutils [req-825ec728-8383-4a76-9f5f-37be965380fc req-7bd3e2b8-d912-4336-9d4d-22421cc13144 service nova] Acquiring lock "refresh_cache-fa5248d1-bddf-4244-a363-2113b0473980" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 752.956608] env[62740]: DEBUG oslo_concurrency.lockutils [req-825ec728-8383-4a76-9f5f-37be965380fc req-7bd3e2b8-d912-4336-9d4d-22421cc13144 service nova] Acquired lock "refresh_cache-fa5248d1-bddf-4244-a363-2113b0473980" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.956770] env[62740]: DEBUG nova.network.neutron [req-825ec728-8383-4a76-9f5f-37be965380fc req-7bd3e2b8-d912-4336-9d4d-22421cc13144 service nova] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Refreshing network info cache for port 277b87d8-d51d-40e3-90fa-28868725759f {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 752.957523] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-825ec728-8383-4a76-9f5f-37be965380fc req-7bd3e2b8-d912-4336-9d4d-22421cc13144 service nova] Expecting reply to msg 72018267d14a4fd080687976e2f84d54 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 752.964431] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 72018267d14a4fd080687976e2f84d54 [ 753.111956] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640103, 'name': CreateVM_Task} progress is 25%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.229446] env[62740]: DEBUG nova.network.neutron [req-825ec728-8383-4a76-9f5f-37be965380fc req-7bd3e2b8-d912-4336-9d4d-22421cc13144 service nova] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Updated VIF entry in instance network info cache for port 277b87d8-d51d-40e3-90fa-28868725759f. 
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 753.229803] env[62740]: DEBUG nova.network.neutron [req-825ec728-8383-4a76-9f5f-37be965380fc req-7bd3e2b8-d912-4336-9d4d-22421cc13144 service nova] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Updating instance_info_cache with network_info: [{"id": "277b87d8-d51d-40e3-90fa-28868725759f", "address": "fa:16:3e:56:17:06", "network": {"id": "304597cd-4bd9-403c-8f5b-990e2a1efabc", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.195", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "47f0062f3bf04910bbbb3502a2f3ff28", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap277b87d8-d5", "ovs_interfaceid": "277b87d8-d51d-40e3-90fa-28868725759f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 753.230333] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-825ec728-8383-4a76-9f5f-37be965380fc req-7bd3e2b8-d912-4336-9d4d-22421cc13144 service nova] Expecting reply to msg e7cc4f2ff93b41da922ea458e2459656 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 753.238742] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e7cc4f2ff93b41da922ea458e2459656 [ 753.239301] env[62740]: DEBUG oslo_concurrency.lockutils [req-825ec728-8383-4a76-9f5f-37be965380fc req-7bd3e2b8-d912-4336-9d4d-22421cc13144 service nova] Releasing lock "refresh_cache-fa5248d1-bddf-4244-a363-2113b0473980" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 753.610516] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640103, 'name': CreateVM_Task} progress is 25%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.111686] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640103, 'name': CreateVM_Task, 'duration_secs': 2.853886} completed successfully. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.111686] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 754.112269] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 754.112517] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Acquired lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.113643] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 754.113643] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee67f027-721b-496f-8406-db8e46138808 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.117655] env[62740]: DEBUG oslo_vmware.api [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Waiting for the task: (returnval){ [ 754.117655] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52f257d6-188b-28d1-9b3d-06280d123053" [ 754.117655] env[62740]: _type = "Task" [ 754.117655] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.127958] env[62740]: DEBUG oslo_vmware.api [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52f257d6-188b-28d1-9b3d-06280d123053, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.628138] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Releasing lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 754.628425] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 754.628644] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 761.891717] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 761.891717] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Starting heal instance info cache {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 761.891717] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Rebuilding the list of instances to heal {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 761.892595] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg af4b87914d634dab96bd14286c362bab in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 761.913095] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af4b87914d634dab96bd14286c362bab [ 761.915379] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 761.915585] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: fece072a-baac-4301-988c-0068d6e71cff] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 761.915763] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Skipping network cache update for instance because it is Building. 
{{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 761.915931] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 761.916130] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 761.916400] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 761.916574] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 761.916705] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 761.916829] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 761.916948] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 761.917081] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Didn't find any instances for network info cache update. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 763.890051] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 763.890446] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 763.890446] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62740) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 763.890550] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager.update_available_resource {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 763.890899] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg ec8846446d574204b8c4d29b413f808c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 763.900531] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ec8846446d574204b8c4d29b413f808c [ 763.901572] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 763.901786] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 763.901952] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 763.902119] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62740) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 763.903212] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d83d7d0-67ba-4502-a618-c13e673f2d1e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.911984] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8c4ed7f-dce9-465b-92df-28c10e25a289 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.926059] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0956c9e-64f7-4cb5-8d8b-1bd30759e3d5 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.932298] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e85e4eaa-c76f-4a0a-8578-6cf05e224ed8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.962270] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181680MB free_disk=90GB free_vcpus=48 pci_devices=None {{(pid=62740) _report_hypervisor_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 763.962392] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 763.963174] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 763.964137] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg ce585ecb332949218b8f7f74b88ade2f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 763.999532] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ce585ecb332949218b8f7f74b88ade2f [ 764.003586] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg dd6ef8d6a08f4c8099e8e1dfadd60238 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 764.013649] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dd6ef8d6a08f4c8099e8e1dfadd60238 [ 764.042464] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance b326be2c-43f2-4f04-9652-cec7e017288e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 764.042626] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance fece072a-baac-4301-988c-0068d6e71cff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 764.042757] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 3102cc87-df1a-4de8-bfdb-9b904f40ea2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 764.042882] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 4f0d1356-bdfb-4cb2-979a-e28f9025b311 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 764.043012] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 75050b95-60c6-4e44-a1d5-0d47492dd739 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 764.043146] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 8053d2ae-ca61-4282-aa89-83f3a2e107bc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 764.043268] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance a24df1e4-2865-4ab3-beae-0892dca12bef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 764.043389] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance fedb62e0-2602-4772-9e5d-00645922d2a8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 764.043506] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance f98589dc-ea7a-44c8-8cca-119d126ea0de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 764.043624] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance fa5248d1-bddf-4244-a363-2113b0473980 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 764.044275] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg ba0ba691438c41aaadb5f4399de5dc47 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 764.055089] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ba0ba691438c41aaadb5f4399de5dc47 [ 764.057141] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 6ec38a6c-f4b2-42ce-b371-5fe82d577545 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 764.057141] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg be03bcd8ddfe45cd8af30fd853eaabf0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 764.066008] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be03bcd8ddfe45cd8af30fd853eaabf0 [ 764.066713] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 26712c18-d9f4-4d7d-80fb-4d527da9c1e3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 764.068138] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 82a608748d54490f8bd847b6bf346e82 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 764.077028] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 82a608748d54490f8bd847b6bf346e82 [ 764.077602] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance ab15259f-6344-4ba0-9abd-8b0ee7df59fa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 764.078124] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 8736da8e589146249a59df98e78064d7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 764.087535] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8736da8e589146249a59df98e78064d7 [ 764.089028] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 425930c0-b9f8-4966-ae9d-0687d0a07213 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 764.089028] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 2697ee6fd16a46049aa1609751ca8bbc in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 764.097816] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2697ee6fd16a46049aa1609751ca8bbc [ 764.099814] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance b2ec3212-25e1-4027-801d-a23309a4d0e6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 764.099814] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 7009f1f6f78a43d692ba7e0effe24d73 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 764.108359] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7009f1f6f78a43d692ba7e0effe24d73 [ 764.109495] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 1ae43e6d-c9ac-494d-a7a9-1f6ff538345a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 764.109652] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 695989ac9c304c3fb19b025f395b27a6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 764.118608] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 695989ac9c304c3fb19b025f395b27a6 [ 764.119278] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance d490ad64-a2fe-4c08-b0fc-56b2e00d9c98 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 764.119765] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 25db5d545e1a49c28c5ba568735ec5b2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 764.128266] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 25db5d545e1a49c28c5ba568735ec5b2 [ 764.128863] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance f7a74539-8a36-450f-aec4-d059670e8f38 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 764.129346] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 5b1a0b74bd4e406197128d1797e69be0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 764.137859] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5b1a0b74bd4e406197128d1797e69be0 [ 764.138489] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance e473e254-387f-4581-97bc-bdeab221b10f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 764.138929] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 6468e76f97e54e4a9393fa583caa55a0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 764.148665] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6468e76f97e54e4a9393fa583caa55a0 [ 764.149343] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 68aa9321-22ce-45a0-8323-fa8564dca46b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 764.149787] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 182653e1729b4cf4be46167dbd8793bb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 764.157991] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 182653e1729b4cf4be46167dbd8793bb [ 764.158614] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 4ec9a397-1e4b-4767-b926-ccc6f63a951c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 764.159050] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 3843b942ce9b4cccb7fec33b5b4ed805 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 764.167128] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3843b942ce9b4cccb7fec33b5b4ed805 [ 764.167705] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance e1c7a748-b3f3-41b7-8784-13699549a01d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 764.168241] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 3a251df90b954a4980904df373afa15d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 764.176302] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a251df90b954a4980904df373afa15d [ 764.176910] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 25cef75e-2176-4999-965b-155cd7f8d137 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 764.177348] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 6e607e7dbb734133bd719c72a00b9415 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 764.186372] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6e607e7dbb734133bd719c72a00b9415 [ 764.187012] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 0e5caaed-20ff-40bd-b0cf-016ac18642cf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 764.187479] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 0e0c50040fb442909abe6678e0f8f872 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 764.197633] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0e0c50040fb442909abe6678e0f8f872 [ 764.198324] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance b9840eca-ec5f-4a8c-9bdf-1212e2640e5c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 764.198780] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 8e566255bbfc4cfe8a39bc663e1b38c5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 764.207724] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e566255bbfc4cfe8a39bc663e1b38c5 [ 764.208694] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance ec703551-5c8d-43bb-b727-709aeeeac9a4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 764.208902] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 99dce5c110164a6290ce4a69a0ca29a0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 764.218278] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 99dce5c110164a6290ce4a69a0ca29a0 [ 764.219221] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 00085df9-ce61-4ccc-8ecf-16956109eb8f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 764.219688] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg b1c99d04e111423480e39d1450d85bf3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 764.228289] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b1c99d04e111423480e39d1450d85bf3 [ 764.228902] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance e5b0daa1-6745-48ad-8e69-6c7362bac085 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 764.229368] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg a5387e14d06c4e27bd476550fa513d4b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 764.239021] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a5387e14d06c4e27bd476550fa513d4b [ 764.239719] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance e8bb18d5-207c-48fb-b7e5-06b72ce61b4f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 764.240171] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg c57e853bc24f4a39995e7e681a4c6e00 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 764.249382] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c57e853bc24f4a39995e7e681a4c6e00 [ 764.250105] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 697e3884-2ef4-423e-af81-e5d1e94f65a2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 764.250498] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg b04c42ea4ac94f1e95aed2594a5acddb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 764.259586] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b04c42ea4ac94f1e95aed2594a5acddb [ 764.260201] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance f22357ec-450c-4545-8822-74b83bfc5a35 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 764.260440] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 764.260668] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 764.602713] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32dd398f-796e-4686-9b0b-3e1d919d0051 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.610541] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1050895f-676e-47b7-9528-eaa01bed2c05 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.640552] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0755f39e-968e-494c-b049-dc3b44e05714 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.647176] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59272a90-93e3-4725-9265-4089c7835853 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.659982] env[62740]: DEBUG nova.compute.provider_tree [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 764.660477] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 65094fb3482443fe97b66ac7ca11d927 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 764.667739] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 65094fb3482443fe97b66ac7ca11d927 [ 764.668727] env[62740]: DEBUG nova.scheduler.client.report [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 764.671101] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 0e3d5b9621f447afaeb4c85c4e3fdfe7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 764.686465] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0e3d5b9621f447afaeb4c85c4e3fdfe7 [ 764.687051] 
env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62740) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 764.687051] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.724s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 765.683706] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 765.683967] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 765.684580] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 815294aa895147fd9a6eccb7256c51d5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 765.702633] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 815294aa895147fd9a6eccb7256c51d5 [ 765.709769] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 765.709968] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 765.710182] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 766.890657] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 770.174281] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg d4ccee1d277e4aad9ebf50525b4909ec in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 770.184068] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d4ccee1d277e4aad9ebf50525b4909ec [ 781.324344] env[62740]: WARNING oslo_vmware.rw_handles [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 781.324344] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 781.324344] env[62740]: ERROR 
oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 781.324344] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 781.324344] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 781.324344] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 781.324344] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 781.324344] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 781.324344] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 781.324344] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 781.324344] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 781.324344] env[62740]: ERROR oslo_vmware.rw_handles [ 781.324977] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/07c0d9ff-313d-4bf0-acec-e33e19cb7bea/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 781.326898] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 781.327176] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Copying Virtual Disk [datastore2] vmware_temp/07c0d9ff-313d-4bf0-acec-e33e19cb7bea/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore2] vmware_temp/07c0d9ff-313d-4bf0-acec-e33e19cb7bea/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 781.327467] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-49617b4a-7dcd-44ea-b644-63e374510748 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.342133] env[62740]: DEBUG oslo_vmware.api [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Waiting for the task: (returnval){ [ 781.342133] env[62740]: value = "task-640104" [ 781.342133] env[62740]: _type = "Task" [ 781.342133] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.354605] env[62740]: DEBUG oslo_vmware.api [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Task: {'id': task-640104, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.853576] env[62740]: DEBUG oslo_vmware.exceptions [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Fault InvalidArgument not matched. {{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 781.853864] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 781.854419] env[62740]: ERROR nova.compute.manager [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 781.854419] env[62740]: Faults: ['InvalidArgument'] [ 781.854419] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Traceback (most recent call last): [ 781.854419] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 781.854419] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] yield resources [ 781.854419] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 781.854419] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] self.driver.spawn(context, instance, image_meta, [ 781.854419] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 781.854419] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 781.854419] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 781.854419] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] self._fetch_image_if_missing(context, vi) [ 781.854419] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 781.854783] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] image_cache(vi, tmp_image_ds_loc) [ 781.854783] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 781.854783] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] vm_util.copy_virtual_disk( [ 781.854783] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", 
line 1423, in copy_virtual_disk [ 781.854783] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] session._wait_for_task(vmdk_copy_task) [ 781.854783] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 781.854783] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] return self.wait_for_task(task_ref) [ 781.854783] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 781.854783] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] return evt.wait() [ 781.854783] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 781.854783] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] result = hub.switch() [ 781.854783] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 781.854783] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] return self.greenlet.switch() [ 781.855129] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 781.855129] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] self.f(*self.args, **self.kw) [ 781.855129] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 781.855129] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] raise exceptions.translate_fault(task_info.error) [ 781.855129] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 781.855129] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Faults: ['InvalidArgument'] [ 781.855129] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] [ 781.855129] env[62740]: INFO nova.compute.manager [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Terminating instance [ 781.856356] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 781.856568] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Creating directory with path [datastore2] 
devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 781.857192] env[62740]: DEBUG nova.compute.manager [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 781.857385] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 781.857604] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-88866da2-e2f6-4991-96c5-7e9888aacafb {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.859878] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f33c06e-125e-4f3a-b0bd-648820aada1f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.866692] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 781.866937] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-33a8aeee-15b7-49cb-baf1-4faf7f0fa0ff {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.869350] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 781.869395] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 781.870358] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f89d69e8-dfe1-44f7-8f3d-7380541b05ba {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.874879] env[62740]: DEBUG oslo_vmware.api [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Waiting for the task: (returnval){ [ 781.874879] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]520a9c18-cef8-c526-3b7a-74a052209da1" [ 781.874879] env[62740]: _type = "Task" [ 781.874879] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.882241] env[62740]: DEBUG oslo_vmware.api [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]520a9c18-cef8-c526-3b7a-74a052209da1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.936352] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 781.936675] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 781.936871] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Deleting the datastore file [datastore2] b326be2c-43f2-4f04-9652-cec7e017288e {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 781.937206] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c381ba66-9347-49f1-b389-27837602934d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.942814] env[62740]: DEBUG oslo_vmware.api [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Waiting for the task: (returnval){ [ 781.942814] env[62740]: value = "task-640106" [ 781.942814] env[62740]: _type = "Task" [ 781.942814] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.950282] env[62740]: DEBUG oslo_vmware.api [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Task: {'id': task-640106, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.385716] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 782.386073] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Creating directory with path [datastore2] vmware_temp/98a1b38b-ccdd-46ea-bfbf-ded3a9c5e5cb/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 782.386258] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f0556589-8d53-4749-93af-c9ee7b256cbe {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.398152] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Created directory with path [datastore2] vmware_temp/98a1b38b-ccdd-46ea-bfbf-ded3a9c5e5cb/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 782.398288] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Fetch image to [datastore2] vmware_temp/98a1b38b-ccdd-46ea-bfbf-ded3a9c5e5cb/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 782.398476] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/98a1b38b-ccdd-46ea-bfbf-ded3a9c5e5cb/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 782.399266] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10429079-be3c-448e-a1aa-ead2733d4344 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.405851] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37021325-eda8-40b4-b36e-e35755e48452 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.415452] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-514d362a-ec22-4749-b721-658e9b439e5b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.447706] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7a3df7c-7bd9-4bd9-b928-6c72509c3590 {{(pid=62740) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.456698] env[62740]: DEBUG oslo_vmware.api [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Task: {'id': task-640106, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075597} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.457234] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 782.457453] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 782.457644] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 782.457824] env[62740]: INFO nova.compute.manager [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Took 0.60 seconds to destroy the instance on the hypervisor. 
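The task records above show the two halves of oslo.vmware's task handling that this log keeps exercising: `wait_for_task` blocks on a polling loop (each round trip is one of the `_poll_task ... progress is 0%` records), and when a task lands in the error state the server-side fault is translated into a client exception, which is how `CopyVirtualDisk_Task`'s `InvalidArgument` fault surfaced as the `VimFaultException` in the traceback. A minimal sketch of that poll-and-translate pattern follows; the helper names and dict-shaped task info are illustrative stand-ins, not the real oslo_vmware API.

```python
# Simplified illustration of the task-polling pattern visible in the log:
# poll until the task reaches a terminal state, then either return the
# result or re-raise the server-side fault as a client exception.
# All names below are stand-ins, not the real oslo_vmware API.

import time


class VimFaultException(Exception):
    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list


def wait_for_task(poll_task_info, interval=0.5):
    """Poll a task-info callable until the task succeeds or errors."""
    while True:
        info = poll_task_info()  # stands in for one vCenter round trip
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            # Mirrors _poll_task: turn the fault into a raised exception.
            raise VimFaultException(info["faults"], info["message"])
        time.sleep(interval)  # still queued/running; poll again


# Usage mirroring task-640104 from the log: the copy task fails with an
# InvalidArgument fault on the 'fileType' parameter.
states = iter([
    {"state": "running"},
    {"state": "error", "faults": ["InvalidArgument"],
     "message": "A specified parameter was not correct: fileType"},
])
try:
    wait_for_task(lambda: next(states), interval=0.01)
except VimFaultException as exc:
    print("task failed:", exc, exc.fault_list)
```

The real library polls vCenter TaskInfo objects over SOAP rather than calling a closure, but the terminal-state handling is the same shape: success returns the task result, error re-raises on the caller's side.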
[ 782.459415] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1ec4ff7c-7407-4a43-896e-2c86252c61c6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.461337] env[62740]: DEBUG nova.compute.claims [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 782.461526] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 782.461741] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 782.463670] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Expecting reply to msg 9b303480004a4ee1a078d15383ba45db in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 782.483180] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 782.505539] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9b303480004a4ee1a078d15383ba45db [ 782.540886] env[62740]: DEBUG oslo_vmware.rw_handles [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/98a1b38b-ccdd-46ea-bfbf-ded3a9c5e5cb/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 782.602962] env[62740]: DEBUG oslo_vmware.rw_handles [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Completed reading data from the image iterator. 
{{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 782.603215] env[62740]: DEBUG oslo_vmware.rw_handles [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/98a1b38b-ccdd-46ea-bfbf-ded3a9c5e5cb/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 782.920749] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11f78737-f1ae-4cef-9d68-115f2765dc45 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.927878] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c54854d9-43a3-4bf7-9f7b-0b8295b95eba {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.957787] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13f3ae07-d21d-4e9f-bd92-a9b3ec77c337 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.964801] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6aaf40c-47bf-49c8-aeeb-fe569fc0b934 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.977566] env[62740]: DEBUG nova.compute.provider_tree [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 782.978086] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Expecting reply to msg 4bc18119e5b541cbac1d93d899aeeacd in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 782.986277] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4bc18119e5b541cbac1d93d899aeeacd [ 782.987484] env[62740]: DEBUG nova.scheduler.client.report [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 782.990151] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Expecting reply to msg 
2d0b2d281ee14faba7541083ab9170b2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 783.001365] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2d0b2d281ee14faba7541083ab9170b2 [ 783.002193] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.540s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 783.002596] env[62740]: ERROR nova.compute.manager [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 783.002596] env[62740]: Faults: ['InvalidArgument'] [ 783.002596] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Traceback (most recent call last): [ 783.002596] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 783.002596] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] self.driver.spawn(context, instance, image_meta, [ 783.002596] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 783.002596] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 783.002596] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 783.002596] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] self._fetch_image_if_missing(context, vi) [ 783.002596] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 783.002596] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] image_cache(vi, tmp_image_ds_loc) [ 783.002596] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 783.003140] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] vm_util.copy_virtual_disk( [ 783.003140] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 783.003140] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] session._wait_for_task(vmdk_copy_task) [ 783.003140] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 783.003140] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] return self.wait_for_task(task_ref) [ 783.003140] env[62740]: ERROR 
nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 783.003140] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] return evt.wait() [ 783.003140] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 783.003140] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] result = hub.switch() [ 783.003140] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 783.003140] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] return self.greenlet.switch() [ 783.003140] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 783.003140] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] self.f(*self.args, **self.kw) [ 783.003681] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 783.003681] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] raise exceptions.translate_fault(task_info.error) [ 783.003681] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 783.003681] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Faults: ['InvalidArgument'] [ 783.003681] env[62740]: ERROR nova.compute.manager [instance: b326be2c-43f2-4f04-9652-cec7e017288e] [ 783.003681] env[62740]: DEBUG nova.compute.utils [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 783.004777] env[62740]: DEBUG nova.compute.manager [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Build of instance b326be2c-43f2-4f04-9652-cec7e017288e was re-scheduled: A specified parameter was not correct: fileType [ 783.004777] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 783.005192] env[62740]: DEBUG nova.compute.manager [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 783.005372] env[62740]: DEBUG nova.compute.manager [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Virt driver does not provide unplug_vifs method, so 
it is not possible to determine if VIFs should be unplugged. {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 783.005659] env[62740]: DEBUG nova.compute.manager [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 783.005740] env[62740]: DEBUG nova.network.neutron [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 783.396258] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Expecting reply to msg ea7cdbfb4cdc450eb6e07bac38340c34 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 783.409372] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea7cdbfb4cdc450eb6e07bac38340c34 [ 783.411245] env[62740]: DEBUG nova.network.neutron [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.411245] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Expecting reply to msg bd39099953924df0a792a1d5820b32c3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 783.424959] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bd39099953924df0a792a1d5820b32c3 [ 783.425593] env[62740]: INFO nova.compute.manager [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] [instance: b326be2c-43f2-4f04-9652-cec7e017288e] Took 0.42 seconds to deallocate network for instance. 
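
The traceback above is the central failure of this run: CopyVirtualDisk_Task fails on the vCenter side, and oslo_vmware's _poll_task turns the task error into VimFaultException ("A specified parameter was not correct: fileType", Faults: ['InvalidArgument']), which unwinds through wait_for_task() into the compute manager, aborts the resource claim, and triggers the reschedule and network deallocation recorded above. As a reading aid, here is a minimal sketch of that poll-and-translate pattern; the types and callable below are simplified stand-ins, not the real oslo.vmware API surface:

    # Minimal sketch of the polling loop behind wait_for_task()/_poll_task
    # in the traceback above. TaskInfo and get_task_info are hypothetical
    # stand-ins; only the control flow mirrors what the log shows.
    import time
    from dataclasses import dataclass, field

    @dataclass
    class TaskInfo:
        state: str                                  # 'queued' | 'running' | 'success' | 'error'
        progress: int = 0
        faults: list = field(default_factory=list)  # e.g. ['InvalidArgument']
        message: str = ''

    class VimFaultException(Exception):
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    def wait_for_task(get_task_info, task_ref, interval=0.5):
        """Poll task_ref until it leaves the running states; raise on fault."""
        while True:
            info = get_task_info(task_ref)          # one round trip per poll
            if info.state in ('queued', 'running'):
                # Logged above as: Task: {'id': ..., 'name': ...} progress is N%.
                time.sleep(interval)
                continue
            if info.state == 'success':
                return info
            # state == 'error': translate the server-side fault into a Python
            # exception. With faults == ['InvalidArgument'] and message
            # 'A specified parameter was not correct: fileType' this is the
            # exception seen in the traceback above.
            raise VimFaultException(info.faults, info.message)

The same pattern recurs later in this section for task-640110, where the identical fileType fault aborts the spawn of instance fedb62e0-2602-4772-9e5d-00645922d2a8.
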
[ 783.427311] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Expecting reply to msg 8f712628d34e45ef83f39905c2aea012 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 783.464476] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f712628d34e45ef83f39905c2aea012 [ 783.467176] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Expecting reply to msg 69b8159d1a914b3a95f993102d2494ed in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 783.500293] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 69b8159d1a914b3a95f993102d2494ed [ 783.534099] env[62740]: INFO nova.scheduler.client.report [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Deleted allocations for instance b326be2c-43f2-4f04-9652-cec7e017288e [ 783.540444] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Expecting reply to msg 961750c3865b42098ddae7fadc6e60dc in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 783.555896] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 961750c3865b42098ddae7fadc6e60dc [ 783.556576] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1f643a3c-df3d-420e-b9d5-5243d9ace351 tempest-ImagesOneServerTestJSON-713630108 tempest-ImagesOneServerTestJSON-713630108-project-member] Lock "b326be2c-43f2-4f04-9652-cec7e017288e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 192.196s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 783.557151] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Expecting reply to msg 52066b8fd5094aee9a34a6252efb03d6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 783.567578] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 52066b8fd5094aee9a34a6252efb03d6 [ 783.568165] env[62740]: DEBUG nova.compute.manager [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Starting instance... 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 783.569996] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Expecting reply to msg 496f8accaed64868a9c6a6b2131383f9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 783.604816] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 496f8accaed64868a9c6a6b2131383f9 [ 783.619651] env[62740]: DEBUG oslo_concurrency.lockutils [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 783.619900] env[62740]: DEBUG oslo_concurrency.lockutils [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 783.621414] env[62740]: INFO nova.compute.claims [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 783.623008] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Expecting reply to msg e282b4c32aa94961a69c8781cf5d1774 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 783.660826] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e282b4c32aa94961a69c8781cf5d1774 [ 783.660826] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Expecting reply to msg 0274a282e7b54a33af0cc27cf3ad0e83 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 783.669380] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0274a282e7b54a33af0cc27cf3ad0e83 [ 784.011806] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1961a3dd-2e2d-48aa-a68e-b529d375960d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.020436] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7e3acce-5f8f-4e0d-aef8-a91986a6ce2c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.050521] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-843161b7-e1a2-4798-a04a-0bb036dc9f7b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.058077] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfdb8857-f714-4aea-a544-bf606997f3a4 
{{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.071220] env[62740]: DEBUG nova.compute.provider_tree [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 784.071821] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Expecting reply to msg 3d1110a82d994aadae9b11c41ce12841 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 784.080457] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3d1110a82d994aadae9b11c41ce12841 [ 784.081300] env[62740]: DEBUG nova.scheduler.client.report [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 784.083701] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Expecting reply to msg bab6954e24914c00a7809422a7f6fc7d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 784.095546] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bab6954e24914c00a7809422a7f6fc7d [ 784.096340] env[62740]: DEBUG oslo_concurrency.lockutils [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.476s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 784.096814] env[62740]: DEBUG nova.compute.manager [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Start building networks asynchronously for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 784.098457] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Expecting reply to msg 3c79b43111ec48819eaec3ec4997c623 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 784.134800] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c79b43111ec48819eaec3ec4997c623 [ 784.136169] env[62740]: DEBUG nova.compute.utils [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 784.136884] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Expecting reply to msg 6c803adc04ab43f59afcba29b11e6b26 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 784.137839] env[62740]: DEBUG nova.compute.manager [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 784.138045] env[62740]: DEBUG nova.network.neutron [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 784.148868] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c803adc04ab43f59afcba29b11e6b26 [ 784.149520] env[62740]: DEBUG nova.compute.manager [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 784.151129] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Expecting reply to msg 296993ea6eb849d9b41dcbf1cffea183 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 784.187282] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 296993ea6eb849d9b41dcbf1cffea183 [ 784.190899] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Expecting reply to msg 4cd20aabbf914ff0b27915817cc2928a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 784.224739] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4cd20aabbf914ff0b27915817cc2928a [ 784.224739] env[62740]: DEBUG nova.compute.manager [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Start spawning the instance on the hypervisor. {{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 784.233557] env[62740]: DEBUG nova.policy [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fd31e1e6fa7347dd9a5e9b7075fe85ca', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e28b312994174de0aa861f03d28f123d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 784.253176] env[62740]: DEBUG nova.virt.hardware [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 784.253427] env[62740]: DEBUG nova.virt.hardware [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 784.253590] env[62740]: DEBUG nova.virt.hardware [None 
req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 784.253776] env[62740]: DEBUG nova.virt.hardware [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 784.253927] env[62740]: DEBUG nova.virt.hardware [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 784.254100] env[62740]: DEBUG nova.virt.hardware [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 784.254316] env[62740]: DEBUG nova.virt.hardware [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 784.254478] env[62740]: DEBUG nova.virt.hardware [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 784.254648] env[62740]: DEBUG nova.virt.hardware [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 784.254813] env[62740]: DEBUG nova.virt.hardware [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 784.254988] env[62740]: DEBUG nova.virt.hardware [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 784.257355] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-548662d6-57db-4f34-8d5c-9745c7aac0ec {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.268077] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c12cc46-f35d-4601-9419-ff6dea2cd705 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.694237] env[62740]: DEBUG nova.network.neutron 
[None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Successfully created port: b970c7e6-56af-4b0a-988b-96c733aa489a {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 785.386990] env[62740]: DEBUG nova.network.neutron [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Successfully updated port: b970c7e6-56af-4b0a-988b-96c733aa489a {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 785.387382] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Expecting reply to msg e480d329f37a4667b4c43d0e4042b877 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 785.397231] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e480d329f37a4667b4c43d0e4042b877 [ 785.397998] env[62740]: DEBUG oslo_concurrency.lockutils [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Acquiring lock "refresh_cache-6ec38a6c-f4b2-42ce-b371-5fe82d577545" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 785.398145] env[62740]: DEBUG oslo_concurrency.lockutils [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Acquired lock "refresh_cache-6ec38a6c-f4b2-42ce-b371-5fe82d577545" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.398370] env[62740]: DEBUG nova.network.neutron [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 785.398750] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Expecting reply to msg 2db764f2d580432792a87c84b7c90d0d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 785.407716] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2db764f2d580432792a87c84b7c90d0d [ 785.455220] env[62740]: DEBUG nova.network.neutron [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 785.728609] env[62740]: DEBUG nova.network.neutron [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Updating instance_info_cache with network_info: [{"id": "b970c7e6-56af-4b0a-988b-96c733aa489a", "address": "fa:16:3e:57:4b:3f", "network": {"id": "7b13e1f8-2a70-4b95-8827-f4dc6674d116", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-649938031-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e28b312994174de0aa861f03d28f123d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb970c7e6-56", "ovs_interfaceid": "b970c7e6-56af-4b0a-988b-96c733aa489a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 785.728609] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Expecting reply to msg c2dd9cd6e7ae4297ac3a6162d90ac20b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 785.743895] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c2dd9cd6e7ae4297ac3a6162d90ac20b [ 785.744869] env[62740]: DEBUG oslo_concurrency.lockutils [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Releasing lock "refresh_cache-6ec38a6c-f4b2-42ce-b371-5fe82d577545" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 785.747015] env[62740]: DEBUG nova.compute.manager [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Instance network_info: |[{"id": "b970c7e6-56af-4b0a-988b-96c733aa489a", "address": "fa:16:3e:57:4b:3f", "network": {"id": "7b13e1f8-2a70-4b95-8827-f4dc6674d116", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-649938031-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e28b312994174de0aa861f03d28f123d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb970c7e6-56", "ovs_interfaceid": "b970c7e6-56af-4b0a-988b-96c733aa489a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 785.747149] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:57:4b:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7043ca7a-807c-4c7b-b646-23ffece188b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b970c7e6-56af-4b0a-988b-96c733aa489a', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 785.755898] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Creating folder: Project (e28b312994174de0aa861f03d28f123d). Parent ref: group-v156037. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 785.757040] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-25452549-0685-4af8-b9a1-7346d0afff6c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.770513] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Created folder: Project (e28b312994174de0aa861f03d28f123d) in parent group-v156037. [ 785.770858] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Creating folder: Instances. Parent ref: group-v156085. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 785.771310] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-89136c4b-dc5a-41df-aec8-2c678f53f7b0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.781965] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Created folder: Instances in parent group-v156085. [ 785.781965] env[62740]: DEBUG oslo.service.loopingcall [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 785.781965] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 785.781965] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1acbf7b9-3626-4e49-ba3b-deb3c18d3627 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.802096] env[62740]: DEBUG nova.compute.manager [req-a65ca970-8dc8-4ddd-9680-f955d5e53b6d req-ae1a0a74-ee98-42d6-9777-23274e40d7d6 service nova] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Received event network-vif-plugged-b970c7e6-56af-4b0a-988b-96c733aa489a {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 785.802140] env[62740]: DEBUG oslo_concurrency.lockutils [req-a65ca970-8dc8-4ddd-9680-f955d5e53b6d req-ae1a0a74-ee98-42d6-9777-23274e40d7d6 service nova] Acquiring lock "6ec38a6c-f4b2-42ce-b371-5fe82d577545-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 785.802353] env[62740]: DEBUG oslo_concurrency.lockutils [req-a65ca970-8dc8-4ddd-9680-f955d5e53b6d req-ae1a0a74-ee98-42d6-9777-23274e40d7d6 service nova] Lock "6ec38a6c-f4b2-42ce-b371-5fe82d577545-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 785.802551] env[62740]: DEBUG oslo_concurrency.lockutils [req-a65ca970-8dc8-4ddd-9680-f955d5e53b6d req-ae1a0a74-ee98-42d6-9777-23274e40d7d6 service nova] Lock "6ec38a6c-f4b2-42ce-b371-5fe82d577545-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 785.802695] env[62740]: DEBUG nova.compute.manager [req-a65ca970-8dc8-4ddd-9680-f955d5e53b6d req-ae1a0a74-ee98-42d6-9777-23274e40d7d6 service nova] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] No waiting events found dispatching network-vif-plugged-b970c7e6-56af-4b0a-988b-96c733aa489a {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 785.803171] env[62740]: WARNING nova.compute.manager [req-a65ca970-8dc8-4ddd-9680-f955d5e53b6d req-ae1a0a74-ee98-42d6-9777-23274e40d7d6 service nova] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Received unexpected event network-vif-plugged-b970c7e6-56af-4b0a-988b-96c733aa489a for instance with vm_state building and task_state spawning. [ 785.803366] env[62740]: DEBUG nova.compute.manager [req-a65ca970-8dc8-4ddd-9680-f955d5e53b6d req-ae1a0a74-ee98-42d6-9777-23274e40d7d6 service nova] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Received event network-changed-b970c7e6-56af-4b0a-988b-96c733aa489a {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 785.803532] env[62740]: DEBUG nova.compute.manager [req-a65ca970-8dc8-4ddd-9680-f955d5e53b6d req-ae1a0a74-ee98-42d6-9777-23274e40d7d6 service nova] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Refreshing instance network info cache due to event network-changed-b970c7e6-56af-4b0a-988b-96c733aa489a. {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}}
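
The "pop_instance_event" lock lines above show Neutron's network-vif-plugged event arriving before the spawn path has registered a waiter for it, so the dispatch finds nothing to signal and the manager logs the "unexpected event" warning (harmless here, since the instance is still building and spawning). A rough sketch of that register/pop bookkeeping follows; the class and method names mirror the log, but the internals are illustrative rather than Nova's actual code:

    # Illustrative sketch (not Nova's implementation) of the event
    # bookkeeping behind the lock lines above: spawning code registers the
    # events it expects, and the external-event handler pops and signals them.
    import threading

    class InstanceEvents:
        def __init__(self):
            self._events = {}              # instance uuid -> {event name: Event}
            self._lock = threading.Lock()  # stands in for the "<uuid>-events" lock

        def prepare_for_instance_event(self, instance_uuid, event_name):
            with self._lock:
                return self._events.setdefault(instance_uuid, {}).setdefault(
                    event_name, threading.Event())

        def pop_instance_event(self, instance_uuid, event_name):
            with self._lock:
                return self._events.get(instance_uuid, {}).pop(event_name, None)

    def external_instance_event(events, instance_uuid, event_name):
        waiter = events.pop_instance_event(instance_uuid, event_name)
        if waiter is None:
            # The "No waiting events found dispatching ..." case above,
            # followed by the WARNING about an unexpected event.
            print(f'Received unexpected event {event_name} for {instance_uuid}')
        else:
            waiter.set()  # wake whatever is blocked waiting for the VIF plug
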
[ 785.803719] env[62740]: DEBUG oslo_concurrency.lockutils [req-a65ca970-8dc8-4ddd-9680-f955d5e53b6d req-ae1a0a74-ee98-42d6-9777-23274e40d7d6 service nova] Acquiring lock "refresh_cache-6ec38a6c-f4b2-42ce-b371-5fe82d577545" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 785.803866] env[62740]: DEBUG oslo_concurrency.lockutils [req-a65ca970-8dc8-4ddd-9680-f955d5e53b6d req-ae1a0a74-ee98-42d6-9777-23274e40d7d6 service nova] Acquired lock "refresh_cache-6ec38a6c-f4b2-42ce-b371-5fe82d577545" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.804038] env[62740]: DEBUG nova.network.neutron [req-a65ca970-8dc8-4ddd-9680-f955d5e53b6d req-ae1a0a74-ee98-42d6-9777-23274e40d7d6 service nova] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Refreshing network info cache for port b970c7e6-56af-4b0a-988b-96c733aa489a {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 785.804510] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-a65ca970-8dc8-4ddd-9680-f955d5e53b6d req-ae1a0a74-ee98-42d6-9777-23274e40d7d6 service nova] Expecting reply to msg 8baaecf6825c4dc6bfeb4c4ddb64ea9f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 785.806723] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 785.806723] env[62740]: value = "task-640109" [ 785.806723] env[62740]: _type = "Task" [ 785.806723] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.816890] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640109, 'name': CreateVM_Task} progress is 5%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.817381] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8baaecf6825c4dc6bfeb4c4ddb64ea9f [ 786.197148] env[62740]: DEBUG nova.network.neutron [req-a65ca970-8dc8-4ddd-9680-f955d5e53b6d req-ae1a0a74-ee98-42d6-9777-23274e40d7d6 service nova] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Updated VIF entry in instance network info cache for port b970c7e6-56af-4b0a-988b-96c733aa489a. 
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 786.197540] env[62740]: DEBUG nova.network.neutron [req-a65ca970-8dc8-4ddd-9680-f955d5e53b6d req-ae1a0a74-ee98-42d6-9777-23274e40d7d6 service nova] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Updating instance_info_cache with network_info: [{"id": "b970c7e6-56af-4b0a-988b-96c733aa489a", "address": "fa:16:3e:57:4b:3f", "network": {"id": "7b13e1f8-2a70-4b95-8827-f4dc6674d116", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-649938031-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e28b312994174de0aa861f03d28f123d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb970c7e6-56", "ovs_interfaceid": "b970c7e6-56af-4b0a-988b-96c733aa489a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.198078] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-a65ca970-8dc8-4ddd-9680-f955d5e53b6d req-ae1a0a74-ee98-42d6-9777-23274e40d7d6 service nova] Expecting reply to msg 558c1ef856c845c7b14aad1b648268ef in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 786.207771] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 558c1ef856c845c7b14aad1b648268ef [ 786.208414] env[62740]: DEBUG oslo_concurrency.lockutils [req-a65ca970-8dc8-4ddd-9680-f955d5e53b6d req-ae1a0a74-ee98-42d6-9777-23274e40d7d6 service nova] Releasing lock "refresh_cache-6ec38a6c-f4b2-42ce-b371-5fe82d577545" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 786.316586] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640109, 'name': CreateVM_Task, 'duration_secs': 0.281341} completed successfully. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.316754] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 786.317456] env[62740]: DEBUG oslo_concurrency.lockutils [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 786.317615] env[62740]: DEBUG oslo_concurrency.lockutils [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Acquired lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.317963] env[62740]: DEBUG oslo_concurrency.lockutils [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 786.318286] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c50bba6-7f99-43d8-98f4-b1c7be03875f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.324044] env[62740]: DEBUG oslo_vmware.api [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Waiting for the task: (returnval){ [ 786.324044] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]522561c5-04e7-fa6e-4e6c-4360069dccab" [ 786.324044] env[62740]: _type = "Task" [ 786.324044] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.333638] env[62740]: DEBUG oslo_vmware.api [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]522561c5-04e7-fa6e-4e6c-4360069dccab, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.836991] env[62740]: DEBUG oslo_concurrency.lockutils [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Releasing lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 786.836991] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 786.836991] env[62740]: DEBUG oslo_concurrency.lockutils [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 792.979303] env[62740]: DEBUG oslo_concurrency.lockutils [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Acquiring lock "732da1c8-e83e-4dd7-96c2-dbfa9468baab" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 792.979585] env[62740]: DEBUG oslo_concurrency.lockutils [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Lock "732da1c8-e83e-4dd7-96c2-dbfa9468baab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 795.483892] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f5ca4bf9-db71-4bc1-b003-a2aa7eaa9276 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg a7227d873dc94e989b815d76a6fb35f6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 795.501247] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a7227d873dc94e989b815d76a6fb35f6 [ 795.502043] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f5ca4bf9-db71-4bc1-b003-a2aa7eaa9276 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Acquiring lock "fece072a-baac-4301-988c-0068d6e71cff" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.139630] env[62740]: WARNING oslo_vmware.rw_handles [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 796.139630] 
env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 796.139630] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 796.139630] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 796.139630] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 796.139630] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 796.139630] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 796.139630] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 796.139630] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 796.139630] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 796.139630] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 796.139630] env[62740]: ERROR oslo_vmware.rw_handles [ 796.140070] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/9d4b10be-0c58-442c-9399-db90f722e4d3/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore1 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 796.141792] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 796.142082] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Copying Virtual Disk [datastore1] vmware_temp/9d4b10be-0c58-442c-9399-db90f722e4d3/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore1] vmware_temp/9d4b10be-0c58-442c-9399-db90f722e4d3/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 796.142389] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-18ff0852-413d-42cf-adf5-d43eaefb9630 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.150511] env[62740]: DEBUG oslo_vmware.api [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Waiting for the task: (returnval){ [ 796.150511] env[62740]: value = "task-640110" [ 796.150511] env[62740]: _type = "Task" [ 796.150511] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.158861] env[62740]: DEBUG oslo_vmware.api [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Task: {'id': task-640110, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.290465] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-42013f34-c1d2-45cb-b492-4034d9971d86 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Expecting reply to msg 4eddc87fb87946e9b6469c7229cd4886 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 796.299196] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4eddc87fb87946e9b6469c7229cd4886 [ 796.299662] env[62740]: DEBUG oslo_concurrency.lockutils [None req-42013f34-c1d2-45cb-b492-4034d9971d86 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Acquiring lock "3102cc87-df1a-4de8-bfdb-9b904f40ea2e" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.662198] env[62740]: DEBUG oslo_vmware.exceptions [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Fault InvalidArgument not matched. {{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 796.662198] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Releasing lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 796.662198] env[62740]: ERROR nova.compute.manager [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 796.662198] env[62740]: Faults: ['InvalidArgument'] [ 796.662198] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Traceback (most recent call last): [ 796.662198] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 796.662198] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] yield resources [ 796.662198] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 796.662754] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] self.driver.spawn(context, instance, image_meta, [ 796.662754] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] File 
"/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 796.662754] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 796.662754] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 796.662754] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] self._fetch_image_if_missing(context, vi) [ 796.662754] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 796.662754] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] image_cache(vi, tmp_image_ds_loc) [ 796.662754] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 796.662754] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] vm_util.copy_virtual_disk( [ 796.662754] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 796.662754] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] session._wait_for_task(vmdk_copy_task) [ 796.662754] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 796.662754] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] return self.wait_for_task(task_ref) [ 796.663165] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 796.663165] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] return evt.wait() [ 796.663165] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 796.663165] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] result = hub.switch() [ 796.663165] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 796.663165] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] return self.greenlet.switch() [ 796.663165] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 796.663165] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] self.f(*self.args, **self.kw) [ 796.663165] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 796.663165] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] raise exceptions.translate_fault(task_info.error) [ 796.663165] 
env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 796.663165] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Faults: ['InvalidArgument'] [ 796.663165] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] [ 796.663557] env[62740]: INFO nova.compute.manager [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Terminating instance [ 796.667053] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Acquired lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.667053] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 796.667053] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fe0eeb53-a7c3-48a1-ba81-03205fc470ec {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.667053] env[62740]: DEBUG nova.compute.manager [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Start destroying the instance on the hypervisor. 
{{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 796.667567] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 796.667879] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b210a30-b748-43d0-b847-971ef11fa664 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.676674] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 796.678112] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d55ec6ce-7a62-4ff7-ada3-89f9a16b920e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.679579] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 796.679756] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 796.680426] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4441c69-8786-461e-8003-571b89af6e28 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.685937] env[62740]: DEBUG oslo_vmware.api [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Waiting for the task: (returnval){ [ 796.685937] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]522ed140-d375-9fcc-9fa7-ee61a5a1c46c" [ 796.685937] env[62740]: _type = "Task" [ 796.685937] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.692600] env[62740]: DEBUG oslo_vmware.api [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]522ed140-d375-9fcc-9fa7-ee61a5a1c46c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.744751] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 796.744975] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Deleting contents of the VM from datastore datastore1 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 796.745163] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Deleting the datastore file [datastore1] fedb62e0-2602-4772-9e5d-00645922d2a8 {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 796.745429] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5632cc3f-fa32-4120-9c45-900f937c1320 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.752155] env[62740]: DEBUG oslo_vmware.api [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Waiting for the task: (returnval){ [ 796.752155] env[62740]: value = "task-640112" [ 796.752155] env[62740]: _type = "Task" [ 796.752155] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.761151] env[62740]: DEBUG oslo_vmware.api [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Task: {'id': task-640112, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.192847] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e41ba793-fbea-410e-856b-eaeff48b7825 tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Expecting reply to msg 5bc6a6a45eff49a985035c64d181f226 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 797.200508] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 797.200792] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Creating directory with path [datastore1] vmware_temp/06dcec66-fb94-4cde-8199-fa8ec4b360d4/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 797.201022] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bfa17200-e45b-4bbd-90d6-87c654cc48a2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.203847] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5bc6a6a45eff49a985035c64d181f226 [ 797.204326] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e41ba793-fbea-410e-856b-eaeff48b7825 tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Acquiring lock "4f0d1356-bdfb-4cb2-979a-e28f9025b311" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 797.213356] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Created directory with path [datastore1] vmware_temp/06dcec66-fb94-4cde-8199-fa8ec4b360d4/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 797.213560] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Fetch image to [datastore1] vmware_temp/06dcec66-fb94-4cde-8199-fa8ec4b360d4/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 797.213732] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore1] vmware_temp/06dcec66-fb94-4cde-8199-fa8ec4b360d4/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore1 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 797.214490] env[62740]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9200d635-c882-4b03-9809-3d41b0ea1ba4 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.222321] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe130c67-7b01-47cb-a79d-0108472d73df {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.231776] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c55c736d-7115-462b-9ef2-33463ea1b23e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.265318] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55667c28-94f3-4772-9b1a-b842b7315f28 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.272712] env[62740]: DEBUG oslo_vmware.api [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Task: {'id': task-640112, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.064506} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.274132] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 797.274329] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Deleted contents of the VM from datastore datastore1 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 797.274507] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 797.274682] env[62740]: INFO nova.compute.manager [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Took 0.61 seconds to destroy the instance on the hypervisor. 
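The traceback above bottoms out in oslo.vmware's task poller: Nova's vm_util.copy_virtual_disk starts a CopyVirtualDisk task on vCenter, and oslo_vmware.api polls the task until it finishes, raising the translated server-side fault on error. The sketch below is a simplified, hypothetical version of that poll-and-translate loop, not the library's actual code; the TaskInfo dict shape and the poll_interval parameter are assumptions made for illustration. It shows how the InvalidArgument fault reported by vCenter becomes the VimFaultException that Nova's spawn path logs here.

    import time

    class VimFaultException(Exception):
        """Stand-in with the same shape as the exception in the log above."""
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list  # e.g. ['InvalidArgument']

    def wait_for_task(get_task_info, poll_interval=0.5):
        """Poll a vSphere task until success, or raise its server-side fault."""
        while True:
            info = get_task_info()  # assumed dict: {'state': ..., 'error': ...}
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                # Analogous to exceptions.translate_fault(task_info.error) in
                # the traceback: the fault names and localized message become
                # the Python exception that Nova's spawn path sees.
                raise VimFaultException(info['error']['faults'],
                                        info['error']['localizedMessage'])
            time.sleep(poll_interval)  # 'queued'/'running': keep polling

    # Reproducing the failure mode seen in this run:
    failed = {'state': 'error',
              'error': {'faults': ['InvalidArgument'],
                        'localizedMessage':
                            'A specified parameter was not correct: fileType'}}
    try:
        wait_for_task(lambda: failed)
    except VimFaultException as exc:
        print(exc, exc.fault_list)  # matches the message and Faults above

Note that the fileType complaint comes back from vCenter itself (it rejects a parameter of the disk-copy request), which is why Nova can only surface the fault, tear the instance down, and reschedule the build, as the records above show.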
[ 797.276464] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-29881c4f-4954-41e9-af38-7cef1dfcecd3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.278376] env[62740]: DEBUG nova.compute.claims [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 797.278554] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 797.278928] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 797.280794] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Expecting reply to msg 4797ad1f058d4c63a7687a34b4bbe110 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 797.303344] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore1 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 797.317638] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4797ad1f058d4c63a7687a34b4bbe110 [ 797.357515] env[62740]: DEBUG oslo_vmware.rw_handles [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/06dcec66-fb94-4cde-8199-fa8ec4b360d4/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 797.420763] env[62740]: DEBUG oslo_vmware.rw_handles [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Completed reading data from the image iterator. 
{{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 797.421395] env[62740]: DEBUG oslo_vmware.rw_handles [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/06dcec66-fb94-4cde-8199-fa8ec4b360d4/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 797.737856] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0159e91-5119-4528-8f07-352e215d37a6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.745413] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f29bfb72-18dc-4148-bb20-9eeed278db25 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.773951] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b26502a-919c-4183-9535-11a5f292a493 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.780518] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e13a070-cf27-491e-bf6c-24c139a21bca {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.792804] env[62740]: DEBUG nova.compute.provider_tree [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 797.793310] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Expecting reply to msg 80edd1df18b3494bb627986bf7cb19c1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 797.801535] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 80edd1df18b3494bb627986bf7cb19c1 [ 797.802415] env[62740]: DEBUG nova.scheduler.client.report [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 797.804645] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 
tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Expecting reply to msg f8f1acea3dbc48a28a338761354e3317 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 797.817890] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f8f1acea3dbc48a28a338761354e3317 [ 797.817890] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.538s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.817890] env[62740]: ERROR nova.compute.manager [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 797.817890] env[62740]: Faults: ['InvalidArgument'] [ 797.817890] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Traceback (most recent call last): [ 797.817890] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 797.817890] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] self.driver.spawn(context, instance, image_meta, [ 797.817890] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 797.817890] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 797.817890] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 797.818517] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] self._fetch_image_if_missing(context, vi) [ 797.818517] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 797.818517] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] image_cache(vi, tmp_image_ds_loc) [ 797.818517] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 797.818517] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] vm_util.copy_virtual_disk( [ 797.818517] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 797.818517] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] session._wait_for_task(vmdk_copy_task) [ 797.818517] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 797.818517] env[62740]: ERROR nova.compute.manager [instance: 
fedb62e0-2602-4772-9e5d-00645922d2a8] return self.wait_for_task(task_ref) [ 797.818517] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 797.818517] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] return evt.wait() [ 797.818517] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 797.818517] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] result = hub.switch() [ 797.819043] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 797.819043] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] return self.greenlet.switch() [ 797.819043] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 797.819043] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] self.f(*self.args, **self.kw) [ 797.819043] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 797.819043] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] raise exceptions.translate_fault(task_info.error) [ 797.819043] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 797.819043] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Faults: ['InvalidArgument'] [ 797.819043] env[62740]: ERROR nova.compute.manager [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] [ 797.819043] env[62740]: DEBUG nova.compute.utils [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 797.819696] env[62740]: DEBUG nova.compute.manager [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Build of instance fedb62e0-2602-4772-9e5d-00645922d2a8 was re-scheduled: A specified parameter was not correct: fileType [ 797.819696] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 797.820095] env[62740]: DEBUG nova.compute.manager [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 797.820272] env[62740]: DEBUG nova.compute.manager [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e 
tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 797.820445] env[62740]: DEBUG nova.compute.manager [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 797.820630] env[62740]: DEBUG nova.network.neutron [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 798.171942] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Expecting reply to msg 99e6ab81c1fc46519847b59d894e5106 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 798.184048] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 99e6ab81c1fc46519847b59d894e5106 [ 798.184608] env[62740]: DEBUG nova.network.neutron [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.185100] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Expecting reply to msg 35d56f0a2cde476f9c55349afb5cafdb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 798.198062] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 35d56f0a2cde476f9c55349afb5cafdb [ 798.198442] env[62740]: INFO nova.compute.manager [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] [instance: fedb62e0-2602-4772-9e5d-00645922d2a8] Took 0.38 seconds to deallocate network for instance.
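Around the failed build, the scheduler report client logs the provider's inventory twice and concludes it has not changed. Placement derives usable capacity per resource class as (total - reserved) * allocation_ratio, so the logged dict corresponds to the headroom computed below; this is a quick sanity check using only the values that appear in this log.

    # Capacity check for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0, using
    # the inventory dict logged above and Placement's capacity formula:
    # usable = (total - reserved) * allocation_ratio
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: {usable:g}")
    # VCPU: 192   MEMORY_MB: 196078   DISK_GB: 400

With that headroom, the subsequent m1.nano claim (1 VCPU, 128 MB, 1 GB root disk) unsurprisingly succeeds on the node, as the "Claim successful" record below shows.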
[ 798.200780] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Expecting reply to msg 1e262f2cd6cf4da29b5ca1a1c879fc6b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 798.241127] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1e262f2cd6cf4da29b5ca1a1c879fc6b [ 798.242923] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Expecting reply to msg f2e5b294162742e49d4ae759d036bce6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 798.275323] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f2e5b294162742e49d4ae759d036bce6 [ 798.309044] env[62740]: INFO nova.scheduler.client.report [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Deleted allocations for instance fedb62e0-2602-4772-9e5d-00645922d2a8 [ 798.316335] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Expecting reply to msg 9c9f70421696471693f4c3a2baf1321c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 798.329745] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9c9f70421696471693f4c3a2baf1321c [ 798.330332] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8ed4d0cc-8b20-4e17-9404-3a28ebf1058e tempest-VolumesAssistedSnapshotsTest-2041209916 tempest-VolumesAssistedSnapshotsTest-2041209916-project-member] Lock "fedb62e0-2602-4772-9e5d-00645922d2a8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 184.975s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 798.330901] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 4b0c2b2bab9f4b4eb72deb641ee8e786 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 798.341676] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4b0c2b2bab9f4b4eb72deb641ee8e786 [ 798.342224] env[62740]: DEBUG nova.compute.manager [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Starting instance... 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 798.343977] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg d553d21689f145328c1222f8e6e32f2e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 798.377663] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d553d21689f145328c1222f8e6e32f2e [ 798.394586] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 798.394841] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 798.396406] env[62740]: INFO nova.compute.claims [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 798.398089] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 9a37cd15ae084cdc8e524cac793ed560 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 798.434045] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9a37cd15ae084cdc8e524cac793ed560 [ 798.435866] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 9b553995ab10400baeda0ac3cc3b40fb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 798.443345] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9b553995ab10400baeda0ac3cc3b40fb [ 798.802088] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e84a2990-9221-4b68-b1ee-61f4e0f324f6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.809678] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99d09cb8-f2ca-41cb-a72c-8c57754a41e7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.840555] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e93593a0-3df7-4ddb-8818-d7aa08f6fab0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.847845] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-845639ca-1f0e-4549-907b-709bfc0c0a53 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.860515] env[62740]: DEBUG nova.compute.provider_tree [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 798.860991] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 53ae3ce2adf84deca0c164ae70278052 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 798.868388] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 53ae3ce2adf84deca0c164ae70278052 [ 798.869289] env[62740]: DEBUG nova.scheduler.client.report [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 798.871520] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 4cb1fdbe3a2944dba6c5c48a1b5b374a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 798.883686] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4cb1fdbe3a2944dba6c5c48a1b5b374a [ 798.885022] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.490s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 798.885022] env[62740]: DEBUG nova.compute.manager [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Start building networks asynchronously for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 798.886675] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 103f7b5003b0403f9698c6c310ab5a0e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 798.916514] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 103f7b5003b0403f9698c6c310ab5a0e [ 798.918380] env[62740]: DEBUG nova.compute.utils [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 798.919062] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 6fb30c38e9cd4716a9c9dbf045cc785b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 798.921461] env[62740]: DEBUG nova.compute.manager [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 798.921461] env[62740]: DEBUG nova.network.neutron [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 798.930800] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6fb30c38e9cd4716a9c9dbf045cc785b [ 798.931364] env[62740]: DEBUG nova.compute.manager [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 798.933093] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 1d258887204c496fa1fb701f5741bf4f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 798.966738] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1d258887204c496fa1fb701f5741bf4f [ 798.971199] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 03b798eb6f944ca6ad780dc228b39f59 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 798.988021] env[62740]: DEBUG nova.policy [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd69d6db778f64160881e1dfebfd4ed7f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ce4469c0ef4e4e42bb30cd2f947294f3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 799.000998] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 03b798eb6f944ca6ad780dc228b39f59 [ 799.002158] env[62740]: DEBUG nova.compute.manager [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Start spawning the instance on the hypervisor. 
{{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 799.031388] env[62740]: DEBUG nova.virt.hardware [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 799.031642] env[62740]: DEBUG nova.virt.hardware [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 799.031799] env[62740]: DEBUG nova.virt.hardware [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 799.031982] env[62740]: DEBUG nova.virt.hardware [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 799.032146] env[62740]: DEBUG nova.virt.hardware [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 799.032321] env[62740]: DEBUG nova.virt.hardware [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 799.032544] env[62740]: DEBUG nova.virt.hardware [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 799.032778] env[62740]: DEBUG nova.virt.hardware [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 799.033015] env[62740]: DEBUG 
nova.virt.hardware [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 799.033196] env[62740]: DEBUG nova.virt.hardware [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 799.033385] env[62740]: DEBUG nova.virt.hardware [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 799.034484] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a6ae713-6c62-44ca-9f06-0f271fa9af4a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.042909] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b84976f5-cf7e-4560-8af1-1b758163def6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.583238] env[62740]: DEBUG nova.network.neutron [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Successfully created port: 9480f2b5-c3e2-4713-8d03-521845d9a8d7 {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 800.627536] env[62740]: DEBUG nova.compute.manager [req-5f2d22b1-47a7-473f-925f-87b64d89f94f req-d51aec03-5455-43be-99d4-bd0c7dc04242 service nova] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Received event network-vif-plugged-9480f2b5-c3e2-4713-8d03-521845d9a8d7 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 800.627786] env[62740]: DEBUG oslo_concurrency.lockutils [req-5f2d22b1-47a7-473f-925f-87b64d89f94f req-d51aec03-5455-43be-99d4-bd0c7dc04242 service nova] Acquiring lock "26712c18-d9f4-4d7d-80fb-4d527da9c1e3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 800.627958] env[62740]: DEBUG oslo_concurrency.lockutils [req-5f2d22b1-47a7-473f-925f-87b64d89f94f req-d51aec03-5455-43be-99d4-bd0c7dc04242 service nova] Lock "26712c18-d9f4-4d7d-80fb-4d527da9c1e3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 800.628152] env[62740]: DEBUG oslo_concurrency.lockutils [req-5f2d22b1-47a7-473f-925f-87b64d89f94f req-d51aec03-5455-43be-99d4-bd0c7dc04242 service nova] Lock "26712c18-d9f4-4d7d-80fb-4d527da9c1e3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 800.628527] env[62740]: DEBUG 
nova.compute.manager [req-5f2d22b1-47a7-473f-925f-87b64d89f94f req-d51aec03-5455-43be-99d4-bd0c7dc04242 service nova] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] No waiting events found dispatching network-vif-plugged-9480f2b5-c3e2-4713-8d03-521845d9a8d7 {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 800.628706] env[62740]: WARNING nova.compute.manager [req-5f2d22b1-47a7-473f-925f-87b64d89f94f req-d51aec03-5455-43be-99d4-bd0c7dc04242 service nova] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Received unexpected event network-vif-plugged-9480f2b5-c3e2-4713-8d03-521845d9a8d7 for instance with vm_state building and task_state spawning. [ 800.696706] env[62740]: DEBUG nova.network.neutron [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Successfully updated port: 9480f2b5-c3e2-4713-8d03-521845d9a8d7 {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 800.696706] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg bcab111ac1f04dbfa9d761f79e61c448 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 800.706034] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bcab111ac1f04dbfa9d761f79e61c448 [ 800.706996] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Acquiring lock "refresh_cache-26712c18-d9f4-4d7d-80fb-4d527da9c1e3" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 800.707278] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Acquired lock "refresh_cache-26712c18-d9f4-4d7d-80fb-4d527da9c1e3" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 800.707527] env[62740]: DEBUG nova.network.neutron [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 800.708126] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg beb31cc9a00241c288339f0eee6420df in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 800.717426] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg beb31cc9a00241c288339f0eee6420df [ 800.788150] env[62740]: DEBUG nova.network.neutron [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 801.146920] env[62740]: DEBUG nova.network.neutron [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Updating instance_info_cache with network_info: [{"id": "9480f2b5-c3e2-4713-8d03-521845d9a8d7", "address": "fa:16:3e:a8:3a:fa", "network": {"id": "c7681252-9fbe-485e-ab67-59da6e6d7279", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1927212820-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce4469c0ef4e4e42bb30cd2f947294f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9480f2b5-c3", "ovs_interfaceid": "9480f2b5-c3e2-4713-8d03-521845d9a8d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 801.147462] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 4c890649a6b7440c8c986212a347a442 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 801.159586] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4c890649a6b7440c8c986212a347a442 [ 801.160269] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Releasing lock "refresh_cache-26712c18-d9f4-4d7d-80fb-4d527da9c1e3" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 801.160503] env[62740]: DEBUG nova.compute.manager [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Instance network_info: |[{"id": "9480f2b5-c3e2-4713-8d03-521845d9a8d7", "address": "fa:16:3e:a8:3a:fa", "network": {"id": "c7681252-9fbe-485e-ab67-59da6e6d7279", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1927212820-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce4469c0ef4e4e42bb30cd2f947294f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9480f2b5-c3", "ovs_interfaceid": "9480f2b5-c3e2-4713-8d03-521845d9a8d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 801.160904] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a8:3a:fa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a0a76279-3c11-4bef-b124-2a2ee13fa377', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9480f2b5-c3e2-4713-8d03-521845d9a8d7', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 801.169827] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Creating folder: Project (ce4469c0ef4e4e42bb30cd2f947294f3). Parent ref: group-v156037. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 801.170411] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-51db3790-e93a-44da-ba89-a40a4452c491 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.183645] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Created folder: Project (ce4469c0ef4e4e42bb30cd2f947294f3) in parent group-v156037. [ 801.183645] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Creating folder: Instances. Parent ref: group-v156088. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 801.183835] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-65e69f7d-5c3e-46ed-ab32-cf5c4d7dea39 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.194604] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Created folder: Instances in parent group-v156088. [ 801.194849] env[62740]: DEBUG oslo.service.loopingcall [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 801.195040] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 801.195253] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c8a48b34-e92e-4c2f-9c9c-c3b3dfc2e531 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.213539] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 801.213539] env[62740]: value = "task-640115" [ 801.213539] env[62740]: _type = "Task" [ 801.213539] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.221170] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640115, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.397220] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c011d844-f52d-43d5-ba7e-2529012b7df5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Expecting reply to msg 87cc3c7b987a46b9a45c715992ba48d0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 801.409205] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 87cc3c7b987a46b9a45c715992ba48d0 [ 801.409828] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c011d844-f52d-43d5-ba7e-2529012b7df5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Acquiring lock "75050b95-60c6-4e44-a1d5-0d47492dd739" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 801.723773] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640115, 'name': CreateVM_Task, 'duration_secs': 0.285965} completed successfully. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.724092] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 801.724630] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 801.724821] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Acquired lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.725115] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 801.725355] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3fb8cc7c-3bf6-4ad2-9c39-5222115d9425 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.729928] env[62740]: DEBUG oslo_vmware.api [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Waiting for the task: (returnval){ [ 801.729928] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52f46975-a5dc-ab0f-13a2-38c72f9a0e10" [ 801.729928] env[62740]: _type = "Task" [ 801.729928] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.738681] env[62740]: DEBUG oslo_vmware.api [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52f46975-a5dc-ab0f-13a2-38c72f9a0e10, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.240867] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Releasing lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 802.241146] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 802.241350] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 802.872543] env[62740]: DEBUG nova.compute.manager [req-4fea8574-33ad-419f-8259-f11fe7aab760 req-5aa47447-84c1-44b0-96e2-030b0b1d8580 service nova] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Received event network-changed-9480f2b5-c3e2-4713-8d03-521845d9a8d7 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 802.872831] env[62740]: DEBUG nova.compute.manager [req-4fea8574-33ad-419f-8259-f11fe7aab760 req-5aa47447-84c1-44b0-96e2-030b0b1d8580 service nova] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Refreshing instance network info cache due to event network-changed-9480f2b5-c3e2-4713-8d03-521845d9a8d7. 
{{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 802.872947] env[62740]: DEBUG oslo_concurrency.lockutils [req-4fea8574-33ad-419f-8259-f11fe7aab760 req-5aa47447-84c1-44b0-96e2-030b0b1d8580 service nova] Acquiring lock "refresh_cache-26712c18-d9f4-4d7d-80fb-4d527da9c1e3" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 802.873107] env[62740]: DEBUG oslo_concurrency.lockutils [req-4fea8574-33ad-419f-8259-f11fe7aab760 req-5aa47447-84c1-44b0-96e2-030b0b1d8580 service nova] Acquired lock "refresh_cache-26712c18-d9f4-4d7d-80fb-4d527da9c1e3" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.873267] env[62740]: DEBUG nova.network.neutron [req-4fea8574-33ad-419f-8259-f11fe7aab760 req-5aa47447-84c1-44b0-96e2-030b0b1d8580 service nova] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Refreshing network info cache for port 9480f2b5-c3e2-4713-8d03-521845d9a8d7 {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 802.873737] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-4fea8574-33ad-419f-8259-f11fe7aab760 req-5aa47447-84c1-44b0-96e2-030b0b1d8580 service nova] Expecting reply to msg 3965ef2f7300401c8539695854b16aa2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 802.881294] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3965ef2f7300401c8539695854b16aa2 [ 803.486589] env[62740]: DEBUG nova.network.neutron [req-4fea8574-33ad-419f-8259-f11fe7aab760 req-5aa47447-84c1-44b0-96e2-030b0b1d8580 service nova] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Updated VIF entry in instance network info cache for port 9480f2b5-c3e2-4713-8d03-521845d9a8d7. 
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 803.486947] env[62740]: DEBUG nova.network.neutron [req-4fea8574-33ad-419f-8259-f11fe7aab760 req-5aa47447-84c1-44b0-96e2-030b0b1d8580 service nova] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Updating instance_info_cache with network_info: [{"id": "9480f2b5-c3e2-4713-8d03-521845d9a8d7", "address": "fa:16:3e:a8:3a:fa", "network": {"id": "c7681252-9fbe-485e-ab67-59da6e6d7279", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1927212820-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce4469c0ef4e4e42bb30cd2f947294f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9480f2b5-c3", "ovs_interfaceid": "9480f2b5-c3e2-4713-8d03-521845d9a8d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.487495] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-4fea8574-33ad-419f-8259-f11fe7aab760 req-5aa47447-84c1-44b0-96e2-030b0b1d8580 service nova] Expecting reply to msg c8f26dced546450ea0b124c70cdca20d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 803.496054] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c8f26dced546450ea0b124c70cdca20d [ 803.496661] env[62740]: DEBUG oslo_concurrency.lockutils [req-4fea8574-33ad-419f-8259-f11fe7aab760 req-5aa47447-84c1-44b0-96e2-030b0b1d8580 service nova] Releasing lock "refresh_cache-26712c18-d9f4-4d7d-80fb-4d527da9c1e3" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 804.312253] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2d920103-d762-48b2-97f5-9576e1df6a73 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg bb2f7db0dee54d57aace7c1e684fc976 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 804.322177] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bb2f7db0dee54d57aace7c1e684fc976 [ 804.322652] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2d920103-d762-48b2-97f5-9576e1df6a73 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquiring lock "8053d2ae-ca61-4282-aa89-83f3a2e107bc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.933817] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e86a9ce2-093d-4a46-96fc-773b73db05a2 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Expecting reply to msg ab84b5ff8f39417d8251c05d7ef79c41 in queue 
reply_30cb6e3d754a4ebf9cedab7950709402 [ 809.943893] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ab84b5ff8f39417d8251c05d7ef79c41 [ 809.944416] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e86a9ce2-093d-4a46-96fc-773b73db05a2 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Acquiring lock "a24df1e4-2865-4ab3-beae-0892dca12bef" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 812.367258] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7e914500-d7f5-4574-9f02-720333161fba tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Expecting reply to msg 568aa11705814a98a070f8a099dc39a5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 812.377881] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 568aa11705814a98a070f8a099dc39a5 [ 812.377881] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7e914500-d7f5-4574-9f02-720333161fba tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Acquiring lock "f98589dc-ea7a-44c8-8cca-119d126ea0de" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 813.403885] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-cf68cba5-44a0-4c3e-b442-a0e0c8f4e916 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg 345a6a609f5d4fa5a0ad193c3deccac1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 813.413310] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 345a6a609f5d4fa5a0ad193c3deccac1 [ 813.413864] env[62740]: DEBUG oslo_concurrency.lockutils [None req-cf68cba5-44a0-4c3e-b442-a0e0c8f4e916 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Acquiring lock "fa5248d1-bddf-4244-a363-2113b0473980" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 817.067151] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ab30e142-d8fc-4521-8542-6420364e7a51 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 999410c7fcfd424f8742e39757a5da41 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 817.077085] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 999410c7fcfd424f8742e39757a5da41 [ 817.077589] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ab30e142-d8fc-4521-8542-6420364e7a51 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Acquiring lock "26712c18-d9f4-4d7d-80fb-4d527da9c1e3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 817.837824] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9a0130e3-7864-4bc8-9559-c44a939b4a34 tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] 
Expecting reply to msg 3cfc43b1760c42b8a9c8f3a7b9b924c2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 817.847666] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3cfc43b1760c42b8a9c8f3a7b9b924c2 [ 817.848346] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9a0130e3-7864-4bc8-9559-c44a939b4a34 tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Acquiring lock "6ec38a6c-f4b2-42ce-b371-5fe82d577545" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 820.890643] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 820.890898] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Cleaning up deleted instances {{(pid=62740) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11217}} [ 820.891786] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 3f9d62a956b64e03bcda06f281c9c336 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 820.905802] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f9d62a956b64e03bcda06f281c9c336 [ 820.906618] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] There are 0 instances to clean {{(pid=62740) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11226}} [ 820.907241] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 820.907431] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Cleaning up deleted instances with incomplete migration {{(pid=62740) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11255}} [ 820.907719] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 0455f57dea0a42e083ee3cc61286a7b4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 820.920098] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0455f57dea0a42e083ee3cc61286a7b4 [ 820.920098] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 820.920098] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg aef27dd871394d9fa180374d09f891c7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 820.926911] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aef27dd871394d9fa180374d09f891c7 [ 823.927567] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task 
ComputeManager._heal_instance_info_cache {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 823.927868] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Starting heal instance info cache {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 823.927868] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Rebuilding the list of instances to heal {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 823.928544] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg c2a95cc64bcd4155841b6f785ef81ba4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 823.950874] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c2a95cc64bcd4155841b6f785ef81ba4 [ 823.955438] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: fece072a-baac-4301-988c-0068d6e71cff] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 823.955619] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 823.955756] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 823.955894] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 823.956034] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 823.956165] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 823.956314] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 823.956447] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Skipping network cache update for instance because it is Building. 
{{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 823.956624] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 823.956697] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 823.956803] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Didn't find any instances for network info cache update. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 823.957286] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 823.957480] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager.update_available_resource {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 823.957798] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 70207c582a8945ada0b55559717fd5af in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 823.967706] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 70207c582a8945ada0b55559717fd5af [ 823.968899] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 823.969123] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 823.969339] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 823.969471] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62740) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 823.970509] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdac36b5-bbc1-45b5-8832-b666d98df408 {{(pid=62740) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.979533] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c146f253-c103-4262-8519-36419025ebae {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.995414] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db5989c5-aa0b-47ac-a361-9be6ea40d1ba {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.001658] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-126f663e-edcb-428f-832c-5f662c681dcd {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.031355] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181693MB free_disk=90GB free_vcpus=48 pci_devices=None {{(pid=62740) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 824.031528] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 824.031728] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 824.032569] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg cb2dd70130a4412381f2eb16d9ff56e8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 824.070338] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cb2dd70130a4412381f2eb16d9ff56e8 [ 824.074605] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 9a5602d506604f648af7df7769e7c5aa in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 824.084429] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9a5602d506604f648af7df7769e7c5aa [ 824.176685] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance fece072a-baac-4301-988c-0068d6e71cff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 824.176768] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 3102cc87-df1a-4de8-bfdb-9b904f40ea2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 824.176956] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 4f0d1356-bdfb-4cb2-979a-e28f9025b311 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 824.177044] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 75050b95-60c6-4e44-a1d5-0d47492dd739 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 824.177191] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 8053d2ae-ca61-4282-aa89-83f3a2e107bc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 824.177411] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance a24df1e4-2865-4ab3-beae-0892dca12bef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 824.177597] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance f98589dc-ea7a-44c8-8cca-119d126ea0de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 824.177677] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance fa5248d1-bddf-4244-a363-2113b0473980 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 824.177791] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 6ec38a6c-f4b2-42ce-b371-5fe82d577545 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 824.177936] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 26712c18-d9f4-4d7d-80fb-4d527da9c1e3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 824.178556] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 29fd68898162489cb4bbcc5a88e8b67b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 824.193424] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 29fd68898162489cb4bbcc5a88e8b67b [ 824.194310] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance b2ec3212-25e1-4027-801d-a23309a4d0e6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 824.194848] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 5ff42faceccd440b81f261dba13bd123 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 824.210703] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ff42faceccd440b81f261dba13bd123 [ 824.211327] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 1ae43e6d-c9ac-494d-a7a9-1f6ff538345a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 824.211887] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 87ef6a8b5e8e4a94a127ac5bf093903d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 824.223264] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 87ef6a8b5e8e4a94a127ac5bf093903d [ 824.223774] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance d490ad64-a2fe-4c08-b0fc-56b2e00d9c98 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 824.224341] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 749777d250594c7080fcc3e30bbf0a5c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 824.234332] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 749777d250594c7080fcc3e30bbf0a5c [ 824.235089] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance f7a74539-8a36-450f-aec4-d059670e8f38 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 824.235672] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 5da0533adb914e98b577377938517949 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 824.244970] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5da0533adb914e98b577377938517949 [ 824.245631] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance e473e254-387f-4581-97bc-bdeab221b10f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 824.246125] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg a88d116ec75c493cb5ceb73748fc0231 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 824.255273] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a88d116ec75c493cb5ceb73748fc0231 [ 824.255964] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 68aa9321-22ce-45a0-8323-fa8564dca46b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 824.256588] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg fca26c1b9f714875810382dfdfdfbc77 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 824.265604] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fca26c1b9f714875810382dfdfdfbc77 [ 824.266348] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 4ec9a397-1e4b-4767-b926-ccc6f63a951c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 824.266812] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 96c39fb7739e4deaaaf47317da844d1f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 824.276262] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 96c39fb7739e4deaaaf47317da844d1f [ 824.276942] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance e1c7a748-b3f3-41b7-8784-13699549a01d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 824.277432] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 11c9faaaf742445ca449e3f2a9d57003 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 824.288326] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 11c9faaaf742445ca449e3f2a9d57003 [ 824.289020] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 25cef75e-2176-4999-965b-155cd7f8d137 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 824.289451] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg ea056f45b877476599e4f7f45ce6c888 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 824.298445] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea056f45b877476599e4f7f45ce6c888 [ 824.299105] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 0e5caaed-20ff-40bd-b0cf-016ac18642cf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 824.299628] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 3d7dbac789354e8abe5dd9cc60c9d3d9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 824.308970] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3d7dbac789354e8abe5dd9cc60c9d3d9 [ 824.309301] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance b9840eca-ec5f-4a8c-9bdf-1212e2640e5c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 824.309771] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg ba71f1a4adb34580832c0d0aec485891 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 824.318979] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ba71f1a4adb34580832c0d0aec485891 [ 824.319668] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance ec703551-5c8d-43bb-b727-709aeeeac9a4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 824.320222] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 7ba50ad1d7b648ca9ca49ee49cb9cd77 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 824.330080] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ba50ad1d7b648ca9ca49ee49cb9cd77 [ 824.330772] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 00085df9-ce61-4ccc-8ecf-16956109eb8f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 824.332023] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 2e460ec703b943b6acde00c7a22824ec in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 824.342813] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e460ec703b943b6acde00c7a22824ec [ 824.343510] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance e5b0daa1-6745-48ad-8e69-6c7362bac085 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 824.343990] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg b47fbf5702c04c9aaee53a73575a763b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 824.354979] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b47fbf5702c04c9aaee53a73575a763b [ 824.355810] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance e8bb18d5-207c-48fb-b7e5-06b72ce61b4f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 824.356340] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 374b0c8966de48379d912c069f3c8d52 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 824.367391] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 374b0c8966de48379d912c069f3c8d52 [ 824.368076] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 697e3884-2ef4-423e-af81-e5d1e94f65a2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 824.368642] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg bcfe41e6ee4b4f34b12d0a79ee0cae2c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 824.382228] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bcfe41e6ee4b4f34b12d0a79ee0cae2c [ 824.385129] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance f22357ec-450c-4545-8822-74b83bfc5a35 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 824.385461] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 015ba400072745479e948ffd6e3be75d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 824.400110] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 015ba400072745479e948ffd6e3be75d [ 824.400110] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 732da1c8-e83e-4dd7-96c2-dbfa9468baab has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 824.400110] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 824.400110] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 824.767892] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b28a1da4-3406-4356-a8fa-d8e8126b97d7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.776715] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2144010a-dd23-462f-9045-c5fe04b64d52 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.805869] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8873711e-01cf-4577-90ea-37f9575fd5b7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.813433] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6091569-2d43-412f-9cc0-9a4705824a08 {{(pid=62740) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.826785] env[62740]: DEBUG nova.compute.provider_tree [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 824.827379] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 537ae690ba5547109642a990a9d7a267 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 824.834650] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 537ae690ba5547109642a990a9d7a267 [ 824.835529] env[62740]: DEBUG nova.scheduler.client.report [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 824.838282] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 2e326776ce4e4d64854759f4865e8975 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 824.851036] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e326776ce4e4d64854759f4865e8975 [ 824.851879] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62740) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 824.852102] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.820s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 825.666990] env[62740]: DEBUG oslo_concurrency.lockutils [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Acquiring lock "388a39df-9fa9-4153-9f3c-4ad94fd5edfb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 825.667328] env[62740]: DEBUG oslo_concurrency.lockutils [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Lock "388a39df-9fa9-4153-9f3c-4ad94fd5edfb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 825.785075] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_volume_usage 
{{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 825.885929] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 825.890471] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 825.890696] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 825.890868] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 825.891025] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62740) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 826.891253] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 829.371184] env[62740]: WARNING oslo_vmware.rw_handles [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 829.371184] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 829.371184] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 829.371184] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 829.371184] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 829.371184] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 829.371184] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 829.371184] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 829.371184] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 829.371184] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 829.371184] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 829.371184] env[62740]: ERROR oslo_vmware.rw_handles [ 829.371184] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] 
[instance: fece072a-baac-4301-988c-0068d6e71cff] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/98a1b38b-ccdd-46ea-bfbf-ded3a9c5e5cb/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 829.372545] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 829.375862] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Copying Virtual Disk [datastore2] vmware_temp/98a1b38b-ccdd-46ea-bfbf-ded3a9c5e5cb/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore2] vmware_temp/98a1b38b-ccdd-46ea-bfbf-ded3a9c5e5cb/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 829.375862] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0571c271-8847-4af8-a050-921545404f6b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.383636] env[62740]: DEBUG oslo_vmware.api [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Waiting for the task: (returnval){ [ 829.383636] env[62740]: value = "task-640116" [ 829.383636] env[62740]: _type = "Task" [ 829.383636] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.392626] env[62740]: DEBUG oslo_vmware.api [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Task: {'id': task-640116, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.895296] env[62740]: DEBUG oslo_vmware.exceptions [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Fault InvalidArgument not matched. 
{{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 829.895753] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 829.896494] env[62740]: ERROR nova.compute.manager [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 829.896494] env[62740]: Faults: ['InvalidArgument'] [ 829.896494] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] Traceback (most recent call last): [ 829.896494] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 829.896494] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] yield resources [ 829.896494] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 829.896494] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] self.driver.spawn(context, instance, image_meta, [ 829.896494] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 829.896494] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] self._vmops.spawn(context, instance, image_meta, injected_files, [ 829.896494] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 829.896494] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] self._fetch_image_if_missing(context, vi) [ 829.896494] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 829.897045] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] image_cache(vi, tmp_image_ds_loc) [ 829.897045] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 829.897045] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] vm_util.copy_virtual_disk( [ 829.897045] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 829.897045] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] session._wait_for_task(vmdk_copy_task) [ 829.897045] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 829.897045] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] return self.wait_for_task(task_ref) [ 829.897045] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 829.897045] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] return evt.wait() [ 829.897045] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 829.897045] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] result = hub.switch() [ 829.897045] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 829.897045] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] return self.greenlet.switch() [ 829.899193] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 829.899193] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] self.f(*self.args, **self.kw) [ 829.899193] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 829.899193] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] raise exceptions.translate_fault(task_info.error) [ 829.899193] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 829.899193] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] Faults: ['InvalidArgument'] [ 829.899193] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] [ 829.899193] env[62740]: INFO nova.compute.manager [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Terminating instance [ 829.899919] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.900393] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 829.901133] env[62740]: DEBUG nova.compute.manager [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] 
Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 829.903072] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 829.903072] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b620059c-d486-4d87-af1f-50f9435f6df3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.904445] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc4859fb-dd0e-407d-8703-b4068da35dd4 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.912025] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 829.913309] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d2fe9bba-3238-487b-89e7-a2bcab9aedbc {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.917018] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 829.917018] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 829.917018] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98ede9ee-1c02-486a-9c24-1a2d5668e312 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.925020] env[62740]: DEBUG oslo_vmware.api [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Waiting for the task: (returnval){ [ 829.925020] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]521f16a5-4ee5-9bd9-d0ef-1f47dc4e561d" [ 829.925020] env[62740]: _type = "Task" [ 829.925020] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.930333] env[62740]: DEBUG oslo_vmware.api [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]521f16a5-4ee5-9bd9-d0ef-1f47dc4e561d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.984424] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 829.984589] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 829.984763] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Deleting the datastore file [datastore2] fece072a-baac-4301-988c-0068d6e71cff {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 829.985813] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-570b49b2-4174-44bf-83e9-8a357ddb6817 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.992389] env[62740]: DEBUG oslo_vmware.api [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Waiting for the task: (returnval){ [ 829.992389] env[62740]: value = "task-640118" [ 829.992389] env[62740]: _type = "Task" [ 829.992389] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.000037] env[62740]: DEBUG oslo_vmware.api [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Task: {'id': task-640118, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.272653] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Acquiring lock "5f57389d-853e-4439-872a-8345664578d0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 830.272838] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Lock "5f57389d-853e-4439-872a-8345664578d0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 830.433813] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 830.435387] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Creating directory with path [datastore2] vmware_temp/04c33d33-a6b6-483c-b3c9-fe0bfd28c513/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 830.435685] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e687fe68-7a4f-4687-be7b-e2c331ac1b98 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.447920] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Created directory with path [datastore2] vmware_temp/04c33d33-a6b6-483c-b3c9-fe0bfd28c513/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 830.448152] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Fetch image to [datastore2] vmware_temp/04c33d33-a6b6-483c-b3c9-fe0bfd28c513/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 830.449068] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/04c33d33-a6b6-483c-b3c9-fe0bfd28c513/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 830.449258] env[62740]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-282f3972-8bf8-4839-bf4e-4146d43434d9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.456681] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caba4ce4-228a-4811-b6da-7c381eda9403 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.464932] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c0ee718-96b6-4a11-999b-4a3bf74f5676 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.499216] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed99ee4e-940f-4816-9a22-90f5ba82d744 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.506410] env[62740]: DEBUG oslo_vmware.api [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Task: {'id': task-640118, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.084113} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.507898] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 830.508107] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 830.508655] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 830.508655] env[62740]: INFO nova.compute.manager [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Took 0.61 seconds to destroy the instance on the hypervisor. 
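
Annotation (not part of the captured log): the task-640118 entries above show the invoke-then-poll pattern oslo.vmware uses for every vCenter side effect in this trace: call a *_Task SOAP method, then poll the returned task object (the "progress is 0%" lines) until it completes. A minimal sketch of that pattern, assuming an already-authenticated oslo.vmware session object like the one nova-compute creates at startup; the function name and arguments below are illustrative, not nova code:

    from oslo_vmware import exceptions as vexc

    def delete_datastore_file(session, path, datacenter=None):
        # FileManager is reached through the session's service content,
        # matching the "Invoking FileManager.DeleteDatastoreFile_Task" line.
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=path,
                                  datacenter=datacenter)
        # wait_for_task polls task_info and raises a translated exception
        # if the task reports an error.
        try:
            session.wait_for_task(task)
        except vexc.FileNotFoundException:
            pass  # file already gone; callers typically treat this as success
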
[ 830.510315] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-33baf933-a627-4eb0-a0b4-5517a60124c1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.512347] env[62740]: DEBUG nova.compute.claims [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 830.512525] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 830.512737] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 830.514640] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg 8318bdb87b1543268e8b1782dea05b4e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 830.543685] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 830.559984] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8318bdb87b1543268e8b1782dea05b4e [ 830.588103] env[62740]: DEBUG nova.scheduler.client.report [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Refreshing inventories for resource provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 830.604232] env[62740]: DEBUG nova.scheduler.client.report [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Updating ProviderTree inventory for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 830.604463] env[62740]: DEBUG nova.compute.provider_tree [None 
req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Updating inventory in ProviderTree for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 830.610266] env[62740]: DEBUG oslo_vmware.rw_handles [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/04c33d33-a6b6-483c-b3c9-fe0bfd28c513/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 830.687653] env[62740]: DEBUG nova.scheduler.client.report [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Refreshing aggregate associations for resource provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0, aggregates: None {{(pid=62740) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 830.692884] env[62740]: DEBUG oslo_vmware.rw_handles [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Completed reading data from the image iterator. {{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 830.693171] env[62740]: DEBUG oslo_vmware.rw_handles [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/04c33d33-a6b6-483c-b3c9-fe0bfd28c513/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 830.710849] env[62740]: DEBUG nova.scheduler.client.report [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Refreshing trait associations for resource provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62740) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 831.070393] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7fd8308-6ce5-4154-a145-edf5901d01fb {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.080658] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e43d74a7-3d91-472f-bb3a-b998e4c94a4e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.116639] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1233a5fa-34b9-4139-892f-9182728c7dcc {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.127617] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-762f8aa3-e94f-4b6a-ab1c-76e6b97a61ee {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.145750] env[62740]: DEBUG nova.compute.provider_tree [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 831.145750] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg 2a218bedb48b4c05ae44cc1fc1039bb5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 831.150135] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a218bedb48b4c05ae44cc1fc1039bb5 [ 831.151190] env[62740]: DEBUG nova.scheduler.client.report [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 831.153461] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg 076415dcc029429684f325ed033fd1f7 in queue 
reply_30cb6e3d754a4ebf9cedab7950709402 [ 831.170280] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 076415dcc029429684f325ed033fd1f7 [ 831.171045] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.658s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.171869] env[62740]: ERROR nova.compute.manager [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 831.171869] env[62740]: Faults: ['InvalidArgument'] [ 831.171869] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] Traceback (most recent call last): [ 831.171869] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 831.171869] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] self.driver.spawn(context, instance, image_meta, [ 831.171869] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 831.171869] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] self._vmops.spawn(context, instance, image_meta, injected_files, [ 831.171869] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 831.171869] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] self._fetch_image_if_missing(context, vi) [ 831.171869] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 831.171869] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] image_cache(vi, tmp_image_ds_loc) [ 831.171869] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 831.172402] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] vm_util.copy_virtual_disk( [ 831.172402] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 831.172402] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] session._wait_for_task(vmdk_copy_task) [ 831.172402] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 831.172402] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] return self.wait_for_task(task_ref) [ 831.172402] env[62740]: ERROR nova.compute.manager [instance: 
fece072a-baac-4301-988c-0068d6e71cff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 831.172402] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] return evt.wait() [ 831.172402] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 831.172402] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] result = hub.switch() [ 831.172402] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 831.172402] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] return self.greenlet.switch() [ 831.172402] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 831.172402] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] self.f(*self.args, **self.kw) [ 831.172849] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 831.172849] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] raise exceptions.translate_fault(task_info.error) [ 831.172849] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 831.172849] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] Faults: ['InvalidArgument'] [ 831.172849] env[62740]: ERROR nova.compute.manager [instance: fece072a-baac-4301-988c-0068d6e71cff] [ 831.172849] env[62740]: DEBUG nova.compute.utils [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 831.174243] env[62740]: DEBUG nova.compute.manager [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Build of instance fece072a-baac-4301-988c-0068d6e71cff was re-scheduled: A specified parameter was not correct: fileType [ 831.174243] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 831.174618] env[62740]: DEBUG nova.compute.manager [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 831.174844] env[62740]: DEBUG nova.compute.manager [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 831.174939] env[62740]: DEBUG nova.compute.manager [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 831.175119] env[62740]: DEBUG nova.network.neutron [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 831.958737] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg 4b438252158d4f6db9d0d1bcedd0242c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 831.968691] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4b438252158d4f6db9d0d1bcedd0242c [ 831.968691] env[62740]: DEBUG nova.network.neutron [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.968691] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg 68eee5c8bd8e4787acad9bbcd31ae99e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 831.983047] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 68eee5c8bd8e4787acad9bbcd31ae99e [ 831.984225] env[62740]: INFO nova.compute.manager [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Took 0.81 seconds to deallocate network for instance. 
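
Annotation (not part of the captured log): the rescheduled build above fails inside CopyVirtualDisk_Task with a vCenter fault that oslo.vmware cannot map to a dedicated exception class (the "Fault InvalidArgument not matched" line), so it surfaces as a generic VimFaultException carrying the fault names in fault_list. A sketch of how a caller can distinguish that fault; session, vmdk_copy_task, and the handler are placeholders, not nova identifiers:

    from oslo_vmware import exceptions as vexc

    def wait_for_copy(session, vmdk_copy_task):
        try:
            session.wait_for_task(vmdk_copy_task)
        except vexc.VimFaultException as e:
            # str(e) here would include
            # "A specified parameter was not correct: fileType"
            if 'InvalidArgument' in e.fault_list:
                handle_bad_disk_spec(e)  # hypothetical handler
            else:
                raise
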
[ 831.988181] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg 44ec4166a5ae46c181300ed12fc419e2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 832.038562] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 44ec4166a5ae46c181300ed12fc419e2 [ 832.042914] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg ff4e9f61ea3848b0ae069348bf37e6fb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 832.090400] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ff4e9f61ea3848b0ae069348bf37e6fb [ 832.118921] env[62740]: INFO nova.scheduler.client.report [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Deleted allocations for instance fece072a-baac-4301-988c-0068d6e71cff [ 832.127469] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg de2dd7e67718455487f57c486474d271 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 832.150857] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg de2dd7e67718455487f57c486474d271 [ 832.151716] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ee899ab6-3a3f-4013-94ca-921e02fdefc8 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Lock "fece072a-baac-4301-988c-0068d6e71cff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 238.526s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.152291] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8c680f70-93a6-403b-91ca-5339f8edd931 tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Expecting reply to msg 1db4dda3930a416982551409bc99f82f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 832.155414] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f5ca4bf9-db71-4bc1-b003-a2aa7eaa9276 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Lock "fece072a-baac-4301-988c-0068d6e71cff" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 36.653s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 832.155989] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f5ca4bf9-db71-4bc1-b003-a2aa7eaa9276 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Acquiring lock "fece072a-baac-4301-988c-0068d6e71cff-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 832.155989] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f5ca4bf9-db71-4bc1-b003-a2aa7eaa9276 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Lock "fece072a-baac-4301-988c-0068d6e71cff-events" 
acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 832.156212] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f5ca4bf9-db71-4bc1-b003-a2aa7eaa9276 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Lock "fece072a-baac-4301-988c-0068d6e71cff-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.159878] env[62740]: INFO nova.compute.manager [None req-f5ca4bf9-db71-4bc1-b003-a2aa7eaa9276 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Terminating instance [ 832.160665] env[62740]: DEBUG nova.compute.manager [None req-f5ca4bf9-db71-4bc1-b003-a2aa7eaa9276 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 832.160919] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f5ca4bf9-db71-4bc1-b003-a2aa7eaa9276 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 832.161510] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a69849f3-0354-49b3-9afd-3dcd214a57e8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.171636] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e60dc0f-a7ff-4bff-9cd9-2489d1a67487 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.182792] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1db4dda3930a416982551409bc99f82f [ 832.185820] env[62740]: DEBUG nova.compute.manager [None req-8c680f70-93a6-403b-91ca-5339f8edd931 tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: ab15259f-6344-4ba0-9abd-8b0ee7df59fa] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 832.185820] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8c680f70-93a6-403b-91ca-5339f8edd931 tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Expecting reply to msg 85b5b89febef4771a80e4c629a89a618 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 832.209412] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-f5ca4bf9-db71-4bc1-b003-a2aa7eaa9276 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fece072a-baac-4301-988c-0068d6e71cff could not be found. 
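
Annotation (not part of the captured log): the "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" lines throughout this section come from oslo.concurrency's lockutils, which nova uses to serialize work per instance UUID (build, terminate, events) and around shared state such as "compute_resources". A minimal sketch of both forms; the lock names and guarded functions are illustrative:

    from oslo_concurrency import lockutils

    # Context-manager form: in-process serialization by lock name.
    with lockutils.lock('compute_resources'):
        update_resource_tracker()  # placeholder for the guarded section

    # Decorator form; nova derives the name from the instance UUID at runtime.
    @lockutils.synchronized('some-lock-name')
    def do_guarded_work():
        pass
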
[ 832.210335] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f5ca4bf9-db71-4bc1-b003-a2aa7eaa9276 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 832.210335] env[62740]: INFO nova.compute.manager [None req-f5ca4bf9-db71-4bc1-b003-a2aa7eaa9276 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: fece072a-baac-4301-988c-0068d6e71cff] Took 0.05 seconds to destroy the instance on the hypervisor. [ 832.210335] env[62740]: DEBUG oslo.service.loopingcall [None req-f5ca4bf9-db71-4bc1-b003-a2aa7eaa9276 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 832.210335] env[62740]: DEBUG nova.compute.manager [-] [instance: fece072a-baac-4301-988c-0068d6e71cff] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 832.210566] env[62740]: DEBUG nova.network.neutron [-] [instance: fece072a-baac-4301-988c-0068d6e71cff] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 832.225572] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 85b5b89febef4771a80e4c629a89a618 [ 832.225572] env[62740]: DEBUG nova.compute.manager [None req-8c680f70-93a6-403b-91ca-5339f8edd931 tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: ab15259f-6344-4ba0-9abd-8b0ee7df59fa] Instance disappeared before build. {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 832.225572] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8c680f70-93a6-403b-91ca-5339f8edd931 tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Expecting reply to msg 3c2f6fbc1ffb4d0084ad34f26bb0b29c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 832.243128] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 49b7ee3aee1443b6b87704a92a7baf20 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 832.251427] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 49b7ee3aee1443b6b87704a92a7baf20 [ 832.252400] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c2f6fbc1ffb4d0084ad34f26bb0b29c [ 832.252767] env[62740]: DEBUG nova.network.neutron [-] [instance: fece072a-baac-4301-988c-0068d6e71cff] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.253402] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c18e7efd9ba144a9beaabc02297cfac0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 832.261255] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c18e7efd9ba144a9beaabc02297cfac0 [ 832.261794] env[62740]: INFO nova.compute.manager [-] [instance: fece072a-baac-4301-988c-0068d6e71cff] Took 0.05 seconds to deallocate network for instance. 
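
Annotation (not part of the captured log): the "Waiting for function ... _deallocate_network_with_retries to return" line above is oslo.service's looping-call machinery; nova wraps network deallocation in a retrying looping call (a back-off variant in current nova; a fixed-interval call is shown here only to illustrate the start()/wait() flow). All names other than the oslo.service API are placeholders:

    from oslo_service import loopingcall

    def _try_once():
        if deallocate_network():  # placeholder for the retried operation
            raise loopingcall.LoopingCallDone()

    timer = loopingcall.FixedIntervalLoopingCall(_try_once)
    timer.start(interval=2).wait()  # blocks until LoopingCallDone is raised
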
[ 832.269746] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f5ca4bf9-db71-4bc1-b003-a2aa7eaa9276 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg 82cd9038d67f44029835cffdbe6dc32d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 832.271770] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8c680f70-93a6-403b-91ca-5339f8edd931 tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Lock "ab15259f-6344-4ba0-9abd-8b0ee7df59fa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 212.764s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.272431] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ac2117ed-d5eb-4992-88e2-8be9df8b68a2 tempest-ServersWithSpecificFlavorTestJSON-23875077 tempest-ServersWithSpecificFlavorTestJSON-23875077-project-member] Expecting reply to msg 7189f8f5ceef47d6bdb328760583df12 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 832.287389] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7189f8f5ceef47d6bdb328760583df12 [ 832.287981] env[62740]: DEBUG nova.compute.manager [None req-ac2117ed-d5eb-4992-88e2-8be9df8b68a2 tempest-ServersWithSpecificFlavorTestJSON-23875077 tempest-ServersWithSpecificFlavorTestJSON-23875077-project-member] [instance: 425930c0-b9f8-4966-ae9d-0687d0a07213] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 832.290789] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ac2117ed-d5eb-4992-88e2-8be9df8b68a2 tempest-ServersWithSpecificFlavorTestJSON-23875077 tempest-ServersWithSpecificFlavorTestJSON-23875077-project-member] Expecting reply to msg 4ffbe4e323d44d4eb4a91f6902fe06eb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 832.340246] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4ffbe4e323d44d4eb4a91f6902fe06eb [ 832.340922] env[62740]: DEBUG nova.compute.manager [None req-ac2117ed-d5eb-4992-88e2-8be9df8b68a2 tempest-ServersWithSpecificFlavorTestJSON-23875077 tempest-ServersWithSpecificFlavorTestJSON-23875077-project-member] [instance: 425930c0-b9f8-4966-ae9d-0687d0a07213] Instance disappeared before build. 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 832.341296] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ac2117ed-d5eb-4992-88e2-8be9df8b68a2 tempest-ServersWithSpecificFlavorTestJSON-23875077 tempest-ServersWithSpecificFlavorTestJSON-23875077-project-member] Expecting reply to msg 57293561835943a1a3c189ac7ac4142b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 832.345037] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 82cd9038d67f44029835cffdbe6dc32d [ 832.355599] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 57293561835943a1a3c189ac7ac4142b [ 832.366795] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f5ca4bf9-db71-4bc1-b003-a2aa7eaa9276 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg 0c2e8d4920eb46a38359d710fd3e3906 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 832.374099] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ac2117ed-d5eb-4992-88e2-8be9df8b68a2 tempest-ServersWithSpecificFlavorTestJSON-23875077 tempest-ServersWithSpecificFlavorTestJSON-23875077-project-member] Lock "425930c0-b9f8-4966-ae9d-0687d0a07213" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 211.564s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.374684] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e5903377-dce5-4298-b3cf-664569c65ee7 tempest-ServersAdmin275Test-2081354469 tempest-ServersAdmin275Test-2081354469-project-member] Expecting reply to msg d06606bcf3af4850907b7a4f40767b0f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 832.388163] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d06606bcf3af4850907b7a4f40767b0f [ 832.388685] env[62740]: DEBUG nova.compute.manager [None req-e5903377-dce5-4298-b3cf-664569c65ee7 tempest-ServersAdmin275Test-2081354469 tempest-ServersAdmin275Test-2081354469-project-member] [instance: b2ec3212-25e1-4027-801d-a23309a4d0e6] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 832.391018] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e5903377-dce5-4298-b3cf-664569c65ee7 tempest-ServersAdmin275Test-2081354469 tempest-ServersAdmin275Test-2081354469-project-member] Expecting reply to msg d65e7496e9ce4ac8a4f132aa583ce838 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 832.426349] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d65e7496e9ce4ac8a4f132aa583ce838 [ 832.427192] env[62740]: DEBUG nova.compute.manager [None req-e5903377-dce5-4298-b3cf-664569c65ee7 tempest-ServersAdmin275Test-2081354469 tempest-ServersAdmin275Test-2081354469-project-member] [instance: b2ec3212-25e1-4027-801d-a23309a4d0e6] Instance disappeared before build. 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 832.427483] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e5903377-dce5-4298-b3cf-664569c65ee7 tempest-ServersAdmin275Test-2081354469 tempest-ServersAdmin275Test-2081354469-project-member] Expecting reply to msg e94b2b742bf04804a725e636fc8b32e3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 832.440502] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e94b2b742bf04804a725e636fc8b32e3 [ 832.444064] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c2e8d4920eb46a38359d710fd3e3906 [ 832.446818] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f5ca4bf9-db71-4bc1-b003-a2aa7eaa9276 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Lock "fece072a-baac-4301-988c-0068d6e71cff" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.291s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.447162] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f5ca4bf9-db71-4bc1-b003-a2aa7eaa9276 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg f64006f039c447628a6425e9b3d0578b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 832.453202] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e5903377-dce5-4298-b3cf-664569c65ee7 tempest-ServersAdmin275Test-2081354469 tempest-ServersAdmin275Test-2081354469-project-member] Lock "b2ec3212-25e1-4027-801d-a23309a4d0e6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 203.842s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.453202] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7591c0a2-ff4f-4ef4-8115-8fa10d00afd0 tempest-ServersTestBootFromVolume-182780058 tempest-ServersTestBootFromVolume-182780058-project-member] Expecting reply to msg 7b81104d8c3049429b107abdc5c48393 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 832.458265] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f64006f039c447628a6425e9b3d0578b [ 832.465497] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7b81104d8c3049429b107abdc5c48393 [ 832.465960] env[62740]: DEBUG nova.compute.manager [None req-7591c0a2-ff4f-4ef4-8115-8fa10d00afd0 tempest-ServersTestBootFromVolume-182780058 tempest-ServersTestBootFromVolume-182780058-project-member] [instance: 1ae43e6d-c9ac-494d-a7a9-1f6ff538345a] Starting instance... 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 832.467822] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7591c0a2-ff4f-4ef4-8115-8fa10d00afd0 tempest-ServersTestBootFromVolume-182780058 tempest-ServersTestBootFromVolume-182780058-project-member] Expecting reply to msg ef444ce7bb984bb3b50f8d60c812dbf3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 832.518911] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ef444ce7bb984bb3b50f8d60c812dbf3 [ 832.519640] env[62740]: DEBUG nova.compute.manager [None req-7591c0a2-ff4f-4ef4-8115-8fa10d00afd0 tempest-ServersTestBootFromVolume-182780058 tempest-ServersTestBootFromVolume-182780058-project-member] [instance: 1ae43e6d-c9ac-494d-a7a9-1f6ff538345a] Instance disappeared before build. {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 832.520168] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7591c0a2-ff4f-4ef4-8115-8fa10d00afd0 tempest-ServersTestBootFromVolume-182780058 tempest-ServersTestBootFromVolume-182780058-project-member] Expecting reply to msg d9981d34db3846d08baae0d691f19623 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 832.532178] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d9981d34db3846d08baae0d691f19623 [ 832.544058] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7591c0a2-ff4f-4ef4-8115-8fa10d00afd0 tempest-ServersTestBootFromVolume-182780058 tempest-ServersTestBootFromVolume-182780058-project-member] Lock "1ae43e6d-c9ac-494d-a7a9-1f6ff538345a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 202.764s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.544645] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0f41c272-e058-4a9b-8675-d44257db4ff9 tempest-ServerGroupTestJSON-1334565079 tempest-ServerGroupTestJSON-1334565079-project-member] Expecting reply to msg 1d9917412daf4817ae580b122c4f0244 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 832.553833] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1d9917412daf4817ae580b122c4f0244 [ 832.554315] env[62740]: DEBUG nova.compute.manager [None req-0f41c272-e058-4a9b-8675-d44257db4ff9 tempest-ServerGroupTestJSON-1334565079 tempest-ServerGroupTestJSON-1334565079-project-member] [instance: d490ad64-a2fe-4c08-b0fc-56b2e00d9c98] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 832.556032] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0f41c272-e058-4a9b-8675-d44257db4ff9 tempest-ServerGroupTestJSON-1334565079 tempest-ServerGroupTestJSON-1334565079-project-member] Expecting reply to msg 7f4b093336d64670b7c0d0c1c4119a81 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 832.581463] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7f4b093336d64670b7c0d0c1c4119a81 [ 832.581961] env[62740]: DEBUG nova.compute.manager [None req-0f41c272-e058-4a9b-8675-d44257db4ff9 tempest-ServerGroupTestJSON-1334565079 tempest-ServerGroupTestJSON-1334565079-project-member] [instance: d490ad64-a2fe-4c08-b0fc-56b2e00d9c98] Instance disappeared before build. {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
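The Lock "<uuid>" "released" ... held 202.764s entries above come from oslo.concurrency's synchronized wrapper (the "inner" frames in lockutils.py), which serializes the build and delete paths per instance UUID and logs how long each caller waited for and held the lock. A minimal sketch of that pattern, assuming only the public oslo_concurrency.lockutils API; the lock name and function body are illustrative, not Nova's code:

    # Hedged sketch: the per-instance critical section implied by the
    # "Acquiring ... / acquired ... waited Ns / released ... held Ns"
    # DEBUG lines. Only the public lockutils API is assumed.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('d490ad64-a2fe-4c08-b0fc-56b2e00d9c98')
    def _locked_do_build_and_run_instance():
        # Only one build/terminate path may work on this instance UUID
        # at a time; the wrapper logs wait and hold durations on exit.
        pass

    _locked_do_build_and_run_instance()

The ~200s hold times in these entries are simply how long each guarded build ran before the lock was released.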
[ 832.582464] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0f41c272-e058-4a9b-8675-d44257db4ff9 tempest-ServerGroupTestJSON-1334565079 tempest-ServerGroupTestJSON-1334565079-project-member] Expecting reply to msg bd77c6921db746dabfca33f1adda2e67 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 832.592087] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bd77c6921db746dabfca33f1adda2e67 [ 832.604849] env[62740]: DEBUG oslo_concurrency.lockutils [None req-0f41c272-e058-4a9b-8675-d44257db4ff9 tempest-ServerGroupTestJSON-1334565079 tempest-ServerGroupTestJSON-1334565079-project-member] Lock "d490ad64-a2fe-4c08-b0fc-56b2e00d9c98" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 201.588s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.605655] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9d71d3e1-b8cc-41df-9553-0a1ccd9fd110 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 6c04f19eef224bc6a0fa05fb3b981bb0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 832.614905] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c04f19eef224bc6a0fa05fb3b981bb0 [ 832.615748] env[62740]: DEBUG nova.compute.manager [None req-9d71d3e1-b8cc-41df-9553-0a1ccd9fd110 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: f7a74539-8a36-450f-aec4-d059670e8f38] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 832.617459] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9d71d3e1-b8cc-41df-9553-0a1ccd9fd110 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg fd0a351795a84dd18ce3885999235866 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 832.640831] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fd0a351795a84dd18ce3885999235866 [ 832.641444] env[62740]: DEBUG nova.compute.manager [None req-9d71d3e1-b8cc-41df-9553-0a1ccd9fd110 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: f7a74539-8a36-450f-aec4-d059670e8f38] Instance disappeared before build.
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 832.641798] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9d71d3e1-b8cc-41df-9553-0a1ccd9fd110 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 5ae342a7432e467eb452b20db0a02692 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 832.654511] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ae342a7432e467eb452b20db0a02692 [ 832.671787] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9d71d3e1-b8cc-41df-9553-0a1ccd9fd110 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Lock "f7a74539-8a36-450f-aec4-d059670e8f38" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 201.421s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.672486] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg f1e768b938e94d269ad9fe544b98f7f5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 832.683485] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f1e768b938e94d269ad9fe544b98f7f5 [ 832.683984] env[62740]: DEBUG nova.compute.manager [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: e473e254-387f-4581-97bc-bdeab221b10f] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 832.686221] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg db8b60adba2c421aa911258c564dc1b7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 832.717827] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg db8b60adba2c421aa911258c564dc1b7 [ 832.742529] env[62740]: DEBUG oslo_concurrency.lockutils [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 832.743024] env[62740]: DEBUG oslo_concurrency.lockutils [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 832.748610] env[62740]: INFO nova.compute.claims [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: e473e254-387f-4581-97bc-bdeab221b10f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 832.750234] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 
tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg fa38b3e1fe9d495dbd69c83f2195c4c5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 832.797020] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fa38b3e1fe9d495dbd69c83f2195c4c5 [ 832.799067] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 3ce00ceaea7c4f8d9326d359c81b86da in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 832.810531] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ce00ceaea7c4f8d9326d359c81b86da [ 832.862827] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5193e630-f331-4933-8d54-01859b062b22 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 5c2088b9795a48f7875d97b70a903cc1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 832.874442] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c2088b9795a48f7875d97b70a903cc1 [ 832.876017] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5193e630-f331-4933-8d54-01859b062b22 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquiring lock "e473e254-387f-4581-97bc-bdeab221b10f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 833.271574] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a65436a-5df3-470d-a326-b2155fba9152 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.282577] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf5c7bbf-cbae-42f9-82d0-a90e8803a6f1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.321523] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9203ae16-777f-4f18-9caa-627332e89a01 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.330445] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beac0f0e-3ee4-4d46-8554-d8a81f91818c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.347398] env[62740]: DEBUG nova.compute.provider_tree [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 833.347398] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 8d676191d5394015b05c57b1451749da in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 833.356429] env[62740]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8d676191d5394015b05c57b1451749da [ 833.357822] env[62740]: DEBUG nova.scheduler.client.report [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 833.360530] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 44e8e796ab3f4b458a587017ed1852a1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 833.381269] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 44e8e796ab3f4b458a587017ed1852a1 [ 833.381269] env[62740]: DEBUG oslo_concurrency.lockutils [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.638s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 833.382091] env[62740]: DEBUG nova.compute.manager [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: e473e254-387f-4581-97bc-bdeab221b10f] Start building networks asynchronously for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}}
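The inventory record repeated by the report client above fixes this node's schedulable capacity: for each resource class, placement treats (total - reserved) * allocation_ratio as the usable pool. Re-deriving the numbers from the values logged above (plain Python, values copied from the log):

    # Effective capacity implied by the logged inventory record.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, usable)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0

The min_unit/max_unit/step_size fields (omitted in the sketch) bound a single allocation rather than the pool, e.g. max_unit=16 caps any one instance at 16 VCPU even though the oversubscribed pool is 192.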
[ 833.383474] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg c2d154612b1f411f81a7530dcfd85f2b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 833.440435] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c2d154612b1f411f81a7530dcfd85f2b [ 833.441168] env[62740]: DEBUG nova.compute.claims [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: e473e254-387f-4581-97bc-bdeab221b10f] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 833.441347] env[62740]: DEBUG oslo_concurrency.lockutils [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 833.441600] env[62740]: DEBUG oslo_concurrency.lockutils [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 833.443932] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 3a3adacaad5f49c3b190253eab1edfc8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 833.485411] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a3adacaad5f49c3b190253eab1edfc8 [ 833.921656] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-245f0283-3832-45c2-9e1d-4da5d084cebe {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.929415] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-491c396b-2e58-4556-bdb3-805417965778 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.960087] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba17f3ac-0990-4afd-8881-332223856d99 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.968036] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e277e3bf-34b0-46f6-9d95-a0ab7561670d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.980955] env[62740]: DEBUG nova.compute.provider_tree [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Inventory has not changed in ProviderTree for provider:
d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 833.981505] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg cf10e89b3d54497c9cd8a9e164a78e9d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 834.020118] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cf10e89b3d54497c9cd8a9e164a78e9d [ 834.021187] env[62740]: DEBUG nova.scheduler.client.report [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 834.023446] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 2e55c01c6e8646a2a6a9880f5caa9c6c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 834.044180] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e55c01c6e8646a2a6a9880f5caa9c6c [ 834.045153] env[62740]: DEBUG oslo_concurrency.lockutils [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.603s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.046400] env[62740]: DEBUG nova.compute.utils [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: e473e254-387f-4581-97bc-bdeab221b10f] Conflict updating instance e473e254-387f-4581-97bc-bdeab221b10f. Expected: {'task_state': [None]}. Actual: {'task_state': 'deleting'} {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 834.048298] env[62740]: DEBUG nova.compute.manager [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: e473e254-387f-4581-97bc-bdeab221b10f] Instance disappeared during build. {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2504}}
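The "Conflict updating instance ... Expected: {'task_state': [None]}. Actual: {'task_state': 'deleting'}" entry above is the guard that makes these build/delete races safe: the build path saves the instance only if task_state still holds the expected value, and backs out (aborting the resource claim, as logged) when a concurrent delete got there first. A schematic sketch of that compare-and-swap, following the shape of Nova's expected_task_state check but not its actual code:

    # Hedged sketch of the guarded save behind the conflict logged above.
    class UnexpectedTaskStateError(Exception):
        pass

    class Instance:
        def __init__(self):
            self.task_state = None  # value currently persisted for the row

        def save(self, expected_task_state=(None,)):
            # Refuse the update unless the stored task_state is still one
            # of the expected values: the compare half of compare-and-swap.
            if self.task_state not in expected_task_state:
                raise UnexpectedTaskStateError(
                    "Expected: {'task_state': %s}. Actual: {'task_state': %r}"
                    % (list(expected_task_state), self.task_state))
            # ...atomically persist the new field values here...

    inst = Instance()
    inst.task_state = 'deleting'                # a concurrent delete won
    try:
        inst.save(expected_task_state=(None,))  # the build path's guard
    except UnexpectedTaskStateError as exc:
        print(exc)  # matches the conflict message in the log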
[ 834.048473] env[62740]: DEBUG nova.compute.manager [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: e473e254-387f-4581-97bc-bdeab221b10f] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 834.048687] env[62740]: DEBUG oslo_concurrency.lockutils [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquiring lock "refresh_cache-e473e254-387f-4581-97bc-bdeab221b10f" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 834.048825] env[62740]: DEBUG oslo_concurrency.lockutils [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquired lock "refresh_cache-e473e254-387f-4581-97bc-bdeab221b10f" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.048977] env[62740]: DEBUG nova.network.neutron [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: e473e254-387f-4581-97bc-bdeab221b10f] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 834.049397] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 1ff9bdf39a3641bca8bb014cc9f50ad0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 834.060106] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1ff9bdf39a3641bca8bb014cc9f50ad0 [ 834.081862] env[62740]: DEBUG nova.network.neutron [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: e473e254-387f-4581-97bc-bdeab221b10f] Instance cache missing network info.
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 834.402929] env[62740]: DEBUG nova.network.neutron [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: e473e254-387f-4581-97bc-bdeab221b10f] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.403537] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg bcefee9ec1714024a6fc59909c04e71f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 834.412626] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bcefee9ec1714024a6fc59909c04e71f [ 834.413309] env[62740]: DEBUG oslo_concurrency.lockutils [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Releasing lock "refresh_cache-e473e254-387f-4581-97bc-bdeab221b10f" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 834.413528] env[62740]: DEBUG nova.compute.manager [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 834.413714] env[62740]: DEBUG nova.compute.manager [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: e473e254-387f-4581-97bc-bdeab221b10f] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 834.413884] env[62740]: DEBUG nova.network.neutron [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: e473e254-387f-4581-97bc-bdeab221b10f] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 834.453309] env[62740]: DEBUG nova.network.neutron [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: e473e254-387f-4581-97bc-bdeab221b10f] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 834.453782] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 505be3fe92774847a5185cbe7514d131 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 834.465523] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 505be3fe92774847a5185cbe7514d131 [ 834.466061] env[62740]: DEBUG nova.network.neutron [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: e473e254-387f-4581-97bc-bdeab221b10f] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.466665] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 405800cef8904fc4b007550dce1a6463 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 834.475386] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 405800cef8904fc4b007550dce1a6463 [ 834.477195] env[62740]: INFO nova.compute.manager [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: e473e254-387f-4581-97bc-bdeab221b10f] Took 0.06 seconds to deallocate network for instance. [ 834.477566] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 1c291a0d77284795ad1443adebf21dc3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 834.529754] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1c291a0d77284795ad1443adebf21dc3 [ 834.531195] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 314a3c0e27a54ec6ad174ee505768fa8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 834.546481] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 314a3c0e27a54ec6ad174ee505768fa8 [ 834.574494] env[62740]: INFO nova.scheduler.client.report [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Deleted allocations for instance e473e254-387f-4581-97bc-bdeab221b10f [ 834.574647] env[62740]: DEBUG oslo_concurrency.lockutils [None req-120782eb-1cf7-45a7-a7a4-6c6957158676 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "e473e254-387f-4581-97bc-bdeab221b10f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 199.516s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.575206] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 
tempest-ServersTestManualDisk-290868403-project-member] Expecting reply to msg bb3b599277d844478ac98b7c86f503a2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 834.576061] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5193e630-f331-4933-8d54-01859b062b22 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "e473e254-387f-4581-97bc-bdeab221b10f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 1.701s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.576235] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5193e630-f331-4933-8d54-01859b062b22 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquiring lock "e473e254-387f-4581-97bc-bdeab221b10f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.576797] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5193e630-f331-4933-8d54-01859b062b22 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "e473e254-387f-4581-97bc-bdeab221b10f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.576797] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5193e630-f331-4933-8d54-01859b062b22 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "e473e254-387f-4581-97bc-bdeab221b10f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.577885] env[62740]: DEBUG nova.objects.instance [None req-5193e630-f331-4933-8d54-01859b062b22 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lazy-loading 'flavor' on Instance uuid e473e254-387f-4581-97bc-bdeab221b10f {{(pid=62740) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 834.578761] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5193e630-f331-4933-8d54-01859b062b22 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 680e2f91288c4fddae1fc2ff8692f786 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 834.587767] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bb3b599277d844478ac98b7c86f503a2 [ 834.588347] env[62740]: DEBUG nova.compute.manager [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Starting instance... 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 834.590191] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Expecting reply to msg 0634a9899d8e4f389ef4eb7eecac6500 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 834.617438] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 680e2f91288c4fddae1fc2ff8692f786 [ 834.618819] env[62740]: DEBUG nova.objects.instance [None req-5193e630-f331-4933-8d54-01859b062b22 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lazy-loading 'metadata' on Instance uuid e473e254-387f-4581-97bc-bdeab221b10f {{(pid=62740) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 834.619474] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5193e630-f331-4933-8d54-01859b062b22 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 147242f34baa48d583919e4b4d9b9bff in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 834.641010] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0634a9899d8e4f389ef4eb7eecac6500 [ 834.651463] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 147242f34baa48d583919e4b4d9b9bff [ 834.654016] env[62740]: DEBUG nova.objects.base [None req-5193e630-f331-4933-8d54-01859b062b22 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Object Instance lazy-loaded attributes: flavor,metadata {{(pid=62740) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 834.654509] env[62740]: INFO nova.compute.manager [None req-5193e630-f331-4933-8d54-01859b062b22 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: e473e254-387f-4581-97bc-bdeab221b10f] Terminating instance [ 834.657784] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5193e630-f331-4933-8d54-01859b062b22 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquiring lock "refresh_cache-e473e254-387f-4581-97bc-bdeab221b10f" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 834.657784] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5193e630-f331-4933-8d54-01859b062b22 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquired lock "refresh_cache-e473e254-387f-4581-97bc-bdeab221b10f" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.657912] env[62740]: DEBUG nova.network.neutron [None req-5193e630-f331-4933-8d54-01859b062b22 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: e473e254-387f-4581-97bc-bdeab221b10f] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 834.658418] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5193e630-f331-4933-8d54-01859b062b22 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 2662ee389f234d39b5361214aef33b2b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 834.664441] 
env[62740]: DEBUG oslo_concurrency.lockutils [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.664907] env[62740]: DEBUG oslo_concurrency.lockutils [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.666312] env[62740]: INFO nova.compute.claims [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 834.668654] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Expecting reply to msg d87b204d813745bea21421038237e0d6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 834.670216] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2662ee389f234d39b5361214aef33b2b [ 834.701305] env[62740]: DEBUG nova.network.neutron [None req-5193e630-f331-4933-8d54-01859b062b22 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: e473e254-387f-4581-97bc-bdeab221b10f] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 834.707226] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d87b204d813745bea21421038237e0d6 [ 834.709120] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Expecting reply to msg c47616ced47948b18f86007641c69288 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 834.719830] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c47616ced47948b18f86007641c69288 [ 834.907811] env[62740]: DEBUG nova.network.neutron [None req-5193e630-f331-4933-8d54-01859b062b22 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: e473e254-387f-4581-97bc-bdeab221b10f] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.908833] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5193e630-f331-4933-8d54-01859b062b22 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 1c0e90ee92e449adb50dea1d80512af4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 834.922922] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1c0e90ee92e449adb50dea1d80512af4 [ 834.923704] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5193e630-f331-4933-8d54-01859b062b22 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Releasing lock "refresh_cache-e473e254-387f-4581-97bc-bdeab221b10f" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 834.926405] env[62740]: DEBUG nova.compute.manager [None req-5193e630-f331-4933-8d54-01859b062b22 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: e473e254-387f-4581-97bc-bdeab221b10f] Start destroying the instance on the hypervisor. 
{{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 834.926405] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5193e630-f331-4933-8d54-01859b062b22 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: e473e254-387f-4581-97bc-bdeab221b10f] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 834.927848] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4ef09a87-383d-4494-a5c3-90a74fef4e82 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.938178] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aa5c666-7929-4d2c-a026-11372bc797dc {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.972355] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-5193e630-f331-4933-8d54-01859b062b22 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: e473e254-387f-4581-97bc-bdeab221b10f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e473e254-387f-4581-97bc-bdeab221b10f could not be found. [ 834.972355] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5193e630-f331-4933-8d54-01859b062b22 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: e473e254-387f-4581-97bc-bdeab221b10f] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 834.972355] env[62740]: INFO nova.compute.manager [None req-5193e630-f331-4933-8d54-01859b062b22 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: e473e254-387f-4581-97bc-bdeab221b10f] Took 0.05 seconds to destroy the instance on the hypervisor. [ 834.972355] env[62740]: DEBUG oslo.service.loopingcall [None req-5193e630-f331-4933-8d54-01859b062b22 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 834.976787] env[62740]: DEBUG nova.compute.manager [-] [instance: e473e254-387f-4581-97bc-bdeab221b10f] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 834.976905] env[62740]: DEBUG nova.network.neutron [-] [instance: e473e254-387f-4581-97bc-bdeab221b10f] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 835.005893] env[62740]: DEBUG nova.network.neutron [-] [instance: e473e254-387f-4581-97bc-bdeab221b10f] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
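The "Waiting for function ..._deallocate_network_with_retries to return" entry above is oslo.service's looping-call machinery: the deallocation is wrapped in a function that the loop invokes repeatedly until it signals completion by raising LoopingCallDone. A minimal sketch of that pattern; FixedIntervalLoopingCall and LoopingCallDone are public oslo.service APIs, but the interval and retry budget here are invented and are not Nova's actual policy:

    from oslo_service import loopingcall

    attempts = []

    def _deallocate_network_with_retries():
        attempts.append(1)
        try:
            pass  # the real code calls neutron's deallocate_for_instance()
        except Exception:
            if len(attempts) < 3:
                return  # swallow the error; run again on the next interval
            raise       # out of retries: let the failure propagate
        raise loopingcall.LoopingCallDone()  # success: stop the loop

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_network_with_retries)
    timer.start(interval=0.1).wait()  # blocks until LoopingCallDone is raised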
[ 835.006784] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 88b6c69676f447bea88e8e5f5c44aa13 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 835.026612] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 88b6c69676f447bea88e8e5f5c44aa13 [ 835.027269] env[62740]: DEBUG nova.network.neutron [-] [instance: e473e254-387f-4581-97bc-bdeab221b10f] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.027512] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg fe7ea7609333471ba5cbf244b0ed7a66 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 835.035707] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fe7ea7609333471ba5cbf244b0ed7a66 [ 835.035971] env[62740]: INFO nova.compute.manager [-] [instance: e473e254-387f-4581-97bc-bdeab221b10f] Took 0.06 seconds to deallocate network for instance. [ 835.042946] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5193e630-f331-4933-8d54-01859b062b22 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg aab5664c354b4dc4b7218a108ec50c11 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 835.108326] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aab5664c354b4dc4b7218a108ec50c11 [ 835.130064] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5193e630-f331-4933-8d54-01859b062b22 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 6026dd3bd3d34161863ca2eb40bd0530 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 835.151177] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a6c1c88-6e53-4f7a-8feb-e859a6f6dd22 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.158742] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cfecefa-7d84-4560-88a0-641b3edb935e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.194464] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6026dd3bd3d34161863ca2eb40bd0530 [ 835.197711] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5193e630-f331-4933-8d54-01859b062b22 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "e473e254-387f-4581-97bc-bdeab221b10f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.622s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 835.198075] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5193e630-f331-4933-8d54-01859b062b22 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 18b32f25df634a3fb0035b01fe7d09cc in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 835.202878] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea00c035-99ce-4a84-9b86-119224439b08 {{(pid=62740) request_handler
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.211641] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77ef0a1e-1e3d-49db-aa74-2299694f0737 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.219330] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 18b32f25df634a3fb0035b01fe7d09cc [ 835.230027] env[62740]: DEBUG nova.compute.provider_tree [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 835.230319] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Expecting reply to msg 0b938bcd4c2c4db59f12e6c22beb4eb2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 835.242150] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0b938bcd4c2c4db59f12e6c22beb4eb2 [ 835.243163] env[62740]: DEBUG nova.scheduler.client.report [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 835.245652] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Expecting reply to msg a5fd604e13db4e29b5ad2b212c140547 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 835.262847] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a5fd604e13db4e29b5ad2b212c140547 [ 835.263710] env[62740]: DEBUG oslo_concurrency.lockutils [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.599s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 835.264209] env[62740]: DEBUG nova.compute.manager [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Start building networks asynchronously for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 835.266384] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Expecting reply to msg 9389352fd84d49b4a99e7d476cbf9b25 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 835.316310] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9389352fd84d49b4a99e7d476cbf9b25 [ 835.317699] env[62740]: DEBUG nova.compute.utils [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 835.318298] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Expecting reply to msg 1eb8b397f0994fdcabef36413bec7eec in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 835.319997] env[62740]: DEBUG nova.compute.manager [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 835.320909] env[62740]: DEBUG nova.network.neutron [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 835.333646] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1eb8b397f0994fdcabef36413bec7eec [ 835.334176] env[62740]: DEBUG nova.compute.manager [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}}
[ 835.336020] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Expecting reply to msg 75457628a250401dac1a5cb6d19c0f3e in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 835.390490] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 75457628a250401dac1a5cb6d19c0f3e
[ 835.394719] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Expecting reply to msg 8beca5111976451d822e9feff94b6fe3 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 835.423694] env[62740]: DEBUG nova.policy [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5884585b3d78418c9dbd97fb65597906', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd55bbcf1169944c591b1379a8063fbdb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}}
[ 835.432247] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8beca5111976451d822e9feff94b6fe3
[ 835.433499] env[62740]: DEBUG nova.compute.manager [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Start spawning the instance on the hypervisor. {{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}}
[ 835.463858] env[62740]: DEBUG nova.virt.hardware [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 835.464391] env[62740]: DEBUG nova.virt.hardware [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 835.464391] env[62740]: DEBUG nova.virt.hardware [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 835.464483] env[62740]: DEBUG nova.virt.hardware [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 835.465039] env[62740]: DEBUG nova.virt.hardware [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 835.465039] env[62740]: DEBUG nova.virt.hardware [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 835.465039] env[62740]: DEBUG nova.virt.hardware [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 835.465372] env[62740]: DEBUG nova.virt.hardware [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 835.465372] env[62740]: DEBUG nova.virt.hardware [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 835.465890] env[62740]: DEBUG nova.virt.hardware [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 835.466296] env[62740]: DEBUG nova.virt.hardware [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 835.467437] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49dff2a5-4e1c-41a5-a249-ac22a2d7e4bc {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 835.476953] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68bd4c38-8e76-4b7b-a12f-1675ae68cac0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 835.682589] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a858de76-9c35-4693-8936-6e5c167cdfb3 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Expecting reply to msg b881660096564f2b9781869aaf8e333b in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 835.703747] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b881660096564f2b9781869aaf8e333b
[ 835.704274] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a858de76-9c35-4693-8936-6e5c167cdfb3 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Acquiring lock "68aa9321-22ce-45a0-8323-fa8564dca46b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 836.409111] env[62740]: DEBUG nova.network.neutron [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Successfully created port: 2b3b81db-2267-4e1b-a249-feb32e29db8c {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 836.735539] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Acquiring lock "472cd209-4192-4473-b788-d1ea342653bf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 836.736175] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Lock "472cd209-4192-4473-b788-d1ea342653bf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 837.493374] env[62740]: DEBUG oslo_concurrency.lockutils [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquiring lock "d8dac9af-0897-4fbf-8ee6-1fb3955d48c0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 837.494124] env[62740]: DEBUG oslo_concurrency.lockutils [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "d8dac9af-0897-4fbf-8ee6-1fb3955d48c0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 838.093021] env[62740]: DEBUG nova.network.neutron [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Successfully updated port: 2b3b81db-2267-4e1b-a249-feb32e29db8c {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 838.093787] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Expecting reply to msg 169caab71c6e48248d509c1f7afbbacf in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 838.111358] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 169caab71c6e48248d509c1f7afbbacf
[ 838.111358] env[62740]: DEBUG oslo_concurrency.lockutils [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Acquiring lock "refresh_cache-68aa9321-22ce-45a0-8323-fa8564dca46b" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 838.111447] env[62740]: DEBUG oslo_concurrency.lockutils [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Acquired lock "refresh_cache-68aa9321-22ce-45a0-8323-fa8564dca46b" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 838.111528] env[62740]: DEBUG nova.network.neutron [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 838.111918] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Expecting reply to msg 7e82dcd1b88b42f489a1e1dd3c86c16d in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 838.121897] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7e82dcd1b88b42f489a1e1dd3c86c16d
[ 838.196204] env[62740]: DEBUG nova.network.neutron [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 838.842347] env[62740]: DEBUG nova.network.neutron [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Updating instance_info_cache with network_info: [{"id": "2b3b81db-2267-4e1b-a249-feb32e29db8c", "address": "fa:16:3e:31:f9:98", "network": {"id": "8e019b7e-aed1-4078-9dcc-65c6cb3f34a6", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-2064973225-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d55bbcf1169944c591b1379a8063fbdb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b3b81db-22", "ovs_interfaceid": "2b3b81db-2267-4e1b-a249-feb32e29db8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 838.842827] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Expecting reply to msg b46b05a5eb7b4b05ad580772293b5488 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 838.854879] env[62740]: DEBUG nova.compute.manager [req-84bf9ca8-2e11-4511-91fe-3f4761f758e4 req-0a1c1b2b-5f2c-4aea-93df-290e49139783 service nova] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Received event network-vif-plugged-2b3b81db-2267-4e1b-a249-feb32e29db8c {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}}
[ 838.854978] env[62740]: DEBUG oslo_concurrency.lockutils [req-84bf9ca8-2e11-4511-91fe-3f4761f758e4 req-0a1c1b2b-5f2c-4aea-93df-290e49139783 service nova] Acquiring lock "68aa9321-22ce-45a0-8323-fa8564dca46b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 838.855634] env[62740]: DEBUG oslo_concurrency.lockutils [req-84bf9ca8-2e11-4511-91fe-3f4761f758e4 req-0a1c1b2b-5f2c-4aea-93df-290e49139783 service nova] Lock "68aa9321-22ce-45a0-8323-fa8564dca46b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 838.855634] env[62740]: DEBUG oslo_concurrency.lockutils [req-84bf9ca8-2e11-4511-91fe-3f4761f758e4 req-0a1c1b2b-5f2c-4aea-93df-290e49139783 service nova] Lock "68aa9321-22ce-45a0-8323-fa8564dca46b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 838.855634] env[62740]: DEBUG nova.compute.manager [req-84bf9ca8-2e11-4511-91fe-3f4761f758e4 req-0a1c1b2b-5f2c-4aea-93df-290e49139783 service nova] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] No waiting events found dispatching network-vif-plugged-2b3b81db-2267-4e1b-a249-feb32e29db8c {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 838.855960] env[62740]: WARNING nova.compute.manager [req-84bf9ca8-2e11-4511-91fe-3f4761f758e4 req-0a1c1b2b-5f2c-4aea-93df-290e49139783 service nova] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Received unexpected event network-vif-plugged-2b3b81db-2267-4e1b-a249-feb32e29db8c for instance with vm_state building and task_state deleting.
[ 838.863620] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b46b05a5eb7b4b05ad580772293b5488
[ 838.863894] env[62740]: DEBUG oslo_concurrency.lockutils [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Releasing lock "refresh_cache-68aa9321-22ce-45a0-8323-fa8564dca46b" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 838.864196] env[62740]: DEBUG nova.compute.manager [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Instance network_info: |[{"id": "2b3b81db-2267-4e1b-a249-feb32e29db8c", "address": "fa:16:3e:31:f9:98", "network": {"id": "8e019b7e-aed1-4078-9dcc-65c6cb3f34a6", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-2064973225-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d55bbcf1169944c591b1379a8063fbdb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b3b81db-22", "ovs_interfaceid": "2b3b81db-2267-4e1b-a249-feb32e29db8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}}
[ 838.864594] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:31:f9:98', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8a34aa30-95be-4b18-98ca-1f2d81f7e9e6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2b3b81db-2267-4e1b-a249-feb32e29db8c', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 838.875709] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Creating folder: Project (d55bbcf1169944c591b1379a8063fbdb). Parent ref: group-v156037. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 838.876866] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fa829710-3380-4ab0-b9fd-10032995e0fe {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 838.891019] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Created folder: Project (d55bbcf1169944c591b1379a8063fbdb) in parent group-v156037.
[ 838.891019] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Creating folder: Instances. Parent ref: group-v156091. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 838.891019] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ce3cb60a-8081-4f65-afc5-1345436e5744 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 838.899187] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Created folder: Instances in parent group-v156091.
[ 838.899734] env[62740]: DEBUG oslo.service.loopingcall [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 838.900182] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 838.900511] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d0af799a-b3f6-4aea-aea2-a5be0846423b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 838.923017] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 838.923017] env[62740]: value = "task-640121"
[ 838.923017] env[62740]: _type = "Task"
[ 838.923017] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 838.930842] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640121, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 839.433591] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640121, 'name': CreateVM_Task, 'duration_secs': 0.287986} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 839.433907] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 839.435336] env[62740]: DEBUG oslo_concurrency.lockutils [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 839.435584] env[62740]: DEBUG oslo_concurrency.lockutils [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Acquired lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 839.435984] env[62740]: DEBUG oslo_concurrency.lockutils [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 839.436327] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41bf363a-a842-4b61-a547-32d2a71087b2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 839.442532] env[62740]: DEBUG oslo_vmware.api [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Waiting for the task: (returnval){
[ 839.442532] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52d37fe6-87b9-66ea-cf02-b9a63b6122da"
[ 839.442532] env[62740]: _type = "Task"
[ 839.442532] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 839.453760] env[62740]: DEBUG oslo_vmware.api [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52d37fe6-87b9-66ea-cf02-b9a63b6122da, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 839.957662] env[62740]: DEBUG oslo_concurrency.lockutils [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Releasing lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 839.959831] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 839.959831] env[62740]: DEBUG oslo_concurrency.lockutils [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 841.059555] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8502e303-1ca6-4cc8-9c89-c501446c6194 tempest-MultipleCreateTestJSON-375945429 tempest-MultipleCreateTestJSON-375945429-project-member] Acquiring lock "2162ea32-6407-4286-9340-b62a9ec0988e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 841.060041] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8502e303-1ca6-4cc8-9c89-c501446c6194 tempest-MultipleCreateTestJSON-375945429 tempest-MultipleCreateTestJSON-375945429-project-member] Lock "2162ea32-6407-4286-9340-b62a9ec0988e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 841.107146] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8502e303-1ca6-4cc8-9c89-c501446c6194 tempest-MultipleCreateTestJSON-375945429 tempest-MultipleCreateTestJSON-375945429-project-member] Acquiring lock "d2669ffb-41b1-474d-bb7a-fabae11e69d0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 841.107146] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8502e303-1ca6-4cc8-9c89-c501446c6194 tempest-MultipleCreateTestJSON-375945429 tempest-MultipleCreateTestJSON-375945429-project-member] Lock "d2669ffb-41b1-474d-bb7a-fabae11e69d0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 841.430767] env[62740]: DEBUG nova.compute.manager [req-d5af1502-cb42-4946-84a1-66a116f55fa8 req-194d5750-80b9-49b9-93cc-b35e5b50d96c service nova] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Received event network-changed-2b3b81db-2267-4e1b-a249-feb32e29db8c {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}}
[ 841.430984] env[62740]: DEBUG nova.compute.manager [req-d5af1502-cb42-4946-84a1-66a116f55fa8 req-194d5750-80b9-49b9-93cc-b35e5b50d96c service nova] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Refreshing instance network info cache due to event network-changed-2b3b81db-2267-4e1b-a249-feb32e29db8c. {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}}
[ 841.431226] env[62740]: DEBUG oslo_concurrency.lockutils [req-d5af1502-cb42-4946-84a1-66a116f55fa8 req-194d5750-80b9-49b9-93cc-b35e5b50d96c service nova] Acquiring lock "refresh_cache-68aa9321-22ce-45a0-8323-fa8564dca46b" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 841.431373] env[62740]: DEBUG oslo_concurrency.lockutils [req-d5af1502-cb42-4946-84a1-66a116f55fa8 req-194d5750-80b9-49b9-93cc-b35e5b50d96c service nova] Acquired lock "refresh_cache-68aa9321-22ce-45a0-8323-fa8564dca46b" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 841.431532] env[62740]: DEBUG nova.network.neutron [req-d5af1502-cb42-4946-84a1-66a116f55fa8 req-194d5750-80b9-49b9-93cc-b35e5b50d96c service nova] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Refreshing network info cache for port 2b3b81db-2267-4e1b-a249-feb32e29db8c {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 841.432396] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-d5af1502-cb42-4946-84a1-66a116f55fa8 req-194d5750-80b9-49b9-93cc-b35e5b50d96c service nova] Expecting reply to msg f766f7c095894c76b5bb3504096caf67 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 841.444385] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f766f7c095894c76b5bb3504096caf67
[ 841.821187] env[62740]: DEBUG nova.network.neutron [req-d5af1502-cb42-4946-84a1-66a116f55fa8 req-194d5750-80b9-49b9-93cc-b35e5b50d96c service nova] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Updated VIF entry in instance network info cache for port 2b3b81db-2267-4e1b-a249-feb32e29db8c. {{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 841.822990] env[62740]: DEBUG nova.network.neutron [req-d5af1502-cb42-4946-84a1-66a116f55fa8 req-194d5750-80b9-49b9-93cc-b35e5b50d96c service nova] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Updating instance_info_cache with network_info: [{"id": "2b3b81db-2267-4e1b-a249-feb32e29db8c", "address": "fa:16:3e:31:f9:98", "network": {"id": "8e019b7e-aed1-4078-9dcc-65c6cb3f34a6", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-2064973225-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d55bbcf1169944c591b1379a8063fbdb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b3b81db-22", "ovs_interfaceid": "2b3b81db-2267-4e1b-a249-feb32e29db8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 841.822990] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-d5af1502-cb42-4946-84a1-66a116f55fa8 req-194d5750-80b9-49b9-93cc-b35e5b50d96c service nova] Expecting reply to msg a05ad4449ff949dfa79e1bfc3f0b5d78 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 841.836499] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a05ad4449ff949dfa79e1bfc3f0b5d78
[ 841.836729] env[62740]: DEBUG oslo_concurrency.lockutils [req-d5af1502-cb42-4946-84a1-66a116f55fa8 req-194d5750-80b9-49b9-93cc-b35e5b50d96c service nova] Releasing lock "refresh_cache-68aa9321-22ce-45a0-8323-fa8564dca46b" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 842.781379] env[62740]: DEBUG oslo_concurrency.lockutils [None req-21bca7be-5c08-4f7e-b029-329cf8a08e11 tempest-ListServersNegativeTestJSON-506053537 tempest-ListServersNegativeTestJSON-506053537-project-member] Acquiring lock "f7574228-f7fc-4ab0-9a38-7671046d46a9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 842.781938] env[62740]: DEBUG oslo_concurrency.lockutils [None req-21bca7be-5c08-4f7e-b029-329cf8a08e11 tempest-ListServersNegativeTestJSON-506053537 tempest-ListServersNegativeTestJSON-506053537-project-member] Lock "f7574228-f7fc-4ab0-9a38-7671046d46a9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 842.809705] env[62740]: DEBUG oslo_concurrency.lockutils [None req-21bca7be-5c08-4f7e-b029-329cf8a08e11 tempest-ListServersNegativeTestJSON-506053537 tempest-ListServersNegativeTestJSON-506053537-project-member] Acquiring lock "b1b86050-2bb1-443b-967b-12531d71ba04" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 842.810222] env[62740]: DEBUG oslo_concurrency.lockutils [None req-21bca7be-5c08-4f7e-b029-329cf8a08e11 tempest-ListServersNegativeTestJSON-506053537 tempest-ListServersNegativeTestJSON-506053537-project-member] Lock "b1b86050-2bb1-443b-967b-12531d71ba04" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 842.834216] env[62740]: DEBUG oslo_concurrency.lockutils [None req-21bca7be-5c08-4f7e-b029-329cf8a08e11 tempest-ListServersNegativeTestJSON-506053537 tempest-ListServersNegativeTestJSON-506053537-project-member] Acquiring lock "285dcc26-a4e9-40bc-82dd-37931f46e7fe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 842.834483] env[62740]: DEBUG oslo_concurrency.lockutils [None req-21bca7be-5c08-4f7e-b029-329cf8a08e11 tempest-ListServersNegativeTestJSON-506053537 tempest-ListServersNegativeTestJSON-506053537-project-member] Lock "285dcc26-a4e9-40bc-82dd-37931f46e7fe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 843.091484] env[62740]: DEBUG oslo_concurrency.lockutils [None req-44c3be26-0c85-4fd2-bfee-78d03eaf4ded tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Acquiring lock "b0995d6c-a700-47a3-a39d-6a6fe1462042" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 843.091484] env[62740]: DEBUG oslo_concurrency.lockutils [None req-44c3be26-0c85-4fd2-bfee-78d03eaf4ded tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Lock "b0995d6c-a700-47a3-a39d-6a6fe1462042" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 844.374808] env[62740]: WARNING oslo_vmware.rw_handles [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 844.374808] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 844.374808] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 844.374808] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 844.374808] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 844.374808] env[62740]: ERROR oslo_vmware.rw_handles response.begin()
[ 844.374808] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 844.374808] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 844.374808] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 844.374808] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 844.374808] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 844.374808] env[62740]: ERROR oslo_vmware.rw_handles
[ 844.375516] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/06dcec66-fb94-4cde-8199-fa8ec4b360d4/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore1 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 844.377266] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 844.377588] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Copying Virtual Disk [datastore1] vmware_temp/06dcec66-fb94-4cde-8199-fa8ec4b360d4/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore1] vmware_temp/06dcec66-fb94-4cde-8199-fa8ec4b360d4/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 844.378309] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8327aa23-7e78-4cdb-ae0f-de5047c501de {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 844.386771] env[62740]: DEBUG oslo_vmware.api [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Waiting for the task: (returnval){
[ 844.386771] env[62740]: value = "task-640122"
[ 844.386771] env[62740]: _type = "Task"
[ 844.386771] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 844.395434] env[62740]: DEBUG oslo_vmware.api [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Task: {'id': task-640122, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 844.898558] env[62740]: DEBUG oslo_vmware.exceptions [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Fault InvalidArgument not matched. {{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 844.898843] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Releasing lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 844.899462] env[62740]: ERROR nova.compute.manager [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 844.899462] env[62740]: Faults: ['InvalidArgument']
[ 844.899462] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Traceback (most recent call last):
[ 844.899462] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 844.899462] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] yield resources
[ 844.899462] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 844.899462] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] self.driver.spawn(context, instance, image_meta,
[ 844.899462] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 844.899462] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 844.899462] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 844.899462] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] self._fetch_image_if_missing(context, vi)
[ 844.899462] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 844.899876] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] image_cache(vi, tmp_image_ds_loc)
[ 844.899876] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 844.899876] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] vm_util.copy_virtual_disk(
[ 844.899876] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 844.899876] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] session._wait_for_task(vmdk_copy_task)
[ 844.899876] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 844.899876] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] return self.wait_for_task(task_ref)
[ 844.899876] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 844.899876] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] return evt.wait()
[ 844.899876] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 844.899876] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] result = hub.switch()
[ 844.899876] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 844.899876] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] return self.greenlet.switch()
[ 844.900284] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 844.900284] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] self.f(*self.args, **self.kw)
[ 844.900284] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 844.900284] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] raise exceptions.translate_fault(task_info.error)
[ 844.900284] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 844.900284] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Faults: ['InvalidArgument']
[ 844.900284] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de]
[ 844.900284] env[62740]: INFO nova.compute.manager [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Terminating instance
[ 844.901446] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Acquired lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 844.901658] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 844.901891] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-517e7e84-97fb-4a40-a009-d26f3c47dca3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 844.904736] env[62740]: DEBUG nova.compute.manager [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 844.904931] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 844.905660] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f12f10fb-c45b-40a2-b968-3edf006b4095 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 844.909649] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 844.909830] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 844.912196] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30295325-d842-49ff-a50e-ebd9d0199213 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 844.914296] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 844.914709] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3070e469-b4ad-4258-9012-e2f79b4dcff5 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 844.919270] env[62740]: DEBUG oslo_vmware.api [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Waiting for the task: (returnval){
[ 844.919270] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]527f48e9-7c76-c95e-6dcd-e3eab4598621"
[ 844.919270] env[62740]: _type = "Task"
[ 844.919270] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 844.926409] env[62740]: DEBUG oslo_vmware.api [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]527f48e9-7c76-c95e-6dcd-e3eab4598621, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 845.024782] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 845.025190] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Deleting contents of the VM from datastore datastore1 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 845.025507] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Deleting the datastore file [datastore1] f98589dc-ea7a-44c8-8cca-119d126ea0de {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 845.025890] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8a584c72-abaf-4d2c-92c6-c250d3602d8c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 845.035421] env[62740]: DEBUG oslo_vmware.api [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Waiting for the task: (returnval){
[ 845.035421] env[62740]: value = "task-640124"
[ 845.035421] env[62740]: _type = "Task"
[ 845.035421] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 845.046724] env[62740]: DEBUG oslo_vmware.api [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Task: {'id': task-640124, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 845.429199] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 845.429521] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Creating directory with path [datastore1] vmware_temp/1419c9c4-88f2-4afd-bc10-33fdac69a158/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 845.429746] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1b1a9f00-7921-467e-91b8-07c7ae39ff61 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 845.441150] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Created directory with path [datastore1] vmware_temp/1419c9c4-88f2-4afd-bc10-33fdac69a158/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 845.441350] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Fetch image to [datastore1] vmware_temp/1419c9c4-88f2-4afd-bc10-33fdac69a158/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 845.441525] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore1] vmware_temp/1419c9c4-88f2-4afd-bc10-33fdac69a158/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore1 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 845.442280] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94b15047-f2ac-4510-ba12-6047d49a55f4 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 845.448991] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f88baf71-bb82-4dfb-b76b-40a708ca63b9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 845.459298] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1b168b8-791f-4023-be12-327669174d19 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 845.489087] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1135570-645d-4897-b18d-2f74a1ab4098 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 845.494856] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5998ee92-6568-4e3d-a8d1-2c3d6d58b5a3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 845.520216] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore1 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 845.546674] env[62740]: DEBUG oslo_vmware.api [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Task: {'id': task-640124, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.068333} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 845.550270] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 845.550485] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Deleted contents of the VM from datastore datastore1 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 845.550644] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 845.551099] env[62740]: INFO nova.compute.manager [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Took 0.65 seconds to destroy the instance on the hypervisor.
[ 845.553500] env[62740]: DEBUG nova.compute.claims [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 845.553500] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 845.553745] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 845.555573] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Expecting reply to msg 0971627b724d4ff696f17cd039899d21 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 845.581858] env[62740]: DEBUG oslo_vmware.rw_handles [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1419c9c4-88f2-4afd-bc10-33fdac69a158/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 845.640057] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0971627b724d4ff696f17cd039899d21
[ 845.646237] env[62740]: DEBUG oslo_vmware.rw_handles [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Completed reading data from the image iterator. {{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 845.646237] env[62740]: DEBUG oslo_vmware.rw_handles [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1419c9c4-88f2-4afd-bc10-33fdac69a158/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 845.987409] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-327f5100-c21f-4fc8-a52d-22dd541abfcb {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 845.994984] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6962839b-5447-4287-b826-be720e5c93e2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 846.026447] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73bd0be5-7d9c-4e52-98ad-71f355acb5c3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 846.033766] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf8efc39-e7a5-4131-879a-6c0095aa6598 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 846.047388] env[62740]: DEBUG nova.compute.provider_tree [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 846.047933] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Expecting reply to msg 9e531b5c22574b669c41f5132827e19c in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 846.056773] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9e531b5c22574b669c41f5132827e19c
[ 846.057724] env[62740]: DEBUG nova.scheduler.client.report [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 846.060119] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Expecting reply to msg 14301b7e218a4d8591d02386691b2e0b in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 846.073987] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 14301b7e218a4d8591d02386691b2e0b
[ 846.074799] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.521s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 846.075340] env[62740]: ERROR nova.compute.manager [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 846.075340] env[62740]: Faults: ['InvalidArgument']
[ 846.075340] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Traceback (most recent call last):
[ 846.075340] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 846.075340] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] self.driver.spawn(context, instance, image_meta,
[ 846.075340] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 846.075340] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 846.075340] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 846.075340] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] self._fetch_image_if_missing(context, vi)
[ 846.075340] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 846.075340] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] image_cache(vi, tmp_image_ds_loc)
[ 846.075340] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 846.075705] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] vm_util.copy_virtual_disk(
[ 846.075705] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 846.075705] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] session._wait_for_task(vmdk_copy_task)
[ 846.075705] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 846.075705] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] return self.wait_for_task(task_ref)
[ 846.075705] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 846.075705] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] return evt.wait()
[ 846.075705] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 846.075705] env[62740]: ERROR
nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] result = hub.switch() [ 846.075705] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 846.075705] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] return self.greenlet.switch() [ 846.075705] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 846.075705] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] self.f(*self.args, **self.kw) [ 846.076065] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 846.076065] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] raise exceptions.translate_fault(task_info.error) [ 846.076065] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 846.076065] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Faults: ['InvalidArgument'] [ 846.076065] env[62740]: ERROR nova.compute.manager [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] [ 846.076286] env[62740]: DEBUG nova.compute.utils [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 846.079086] env[62740]: DEBUG nova.compute.manager [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Build of instance f98589dc-ea7a-44c8-8cca-119d126ea0de was re-scheduled: A specified parameter was not correct: fileType [ 846.079086] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 846.079506] env[62740]: DEBUG nova.compute.manager [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 846.079691] env[62740]: DEBUG nova.compute.manager [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. 
{{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 846.079866] env[62740]: DEBUG nova.compute.manager [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 846.080037] env[62740]: DEBUG nova.network.neutron [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 846.466670] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Expecting reply to msg 5f27ab64793d4877998d8d37e1c3a1ab in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 846.481202] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5f27ab64793d4877998d8d37e1c3a1ab [ 846.481202] env[62740]: DEBUG nova.network.neutron [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.481202] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Expecting reply to msg 83b9e8fa38f44905986bf73d0509cd39 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 846.498198] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 83b9e8fa38f44905986bf73d0509cd39 [ 846.498921] env[62740]: INFO nova.compute.manager [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Took 0.42 seconds to deallocate network for instance. 
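The traceback above ends in oslo.vmware's `_poll_task`, which converts the vSphere task error into a `VimFaultException` via `exceptions.translate_fault`; Nova then logs the message and fault names and reschedules the build. A sketch of that exception's shape, constructing one by hand purely for illustration (`VimFaultException` is the real oslo.vmware class):

```python
# Illustrative only: the exception raised in the traceback above,
# built manually rather than by translate_fault on a real task error.
from oslo_vmware import exceptions as vexc

try:
    raise vexc.VimFaultException(
        ['InvalidArgument'],
        'A specified parameter was not correct: fileType')
except vexc.VimFaultException as e:
    # Callers such as Nova's _build_and_run_instance log the message
    # and the fault names, then abort the claim and reschedule.
    print(str(e))        # message plus the fault list
    print(e.fault_list)  # ['InvalidArgument']
```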
[ 846.500676] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Expecting reply to msg 8718111598a84092bf6b189753370ba7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 846.555421] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8718111598a84092bf6b189753370ba7 [ 846.559150] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Expecting reply to msg 69c8650948884226952e4cf33857ee77 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 846.598191] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 69c8650948884226952e4cf33857ee77 [ 846.628478] env[62740]: INFO nova.scheduler.client.report [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Deleted allocations for instance f98589dc-ea7a-44c8-8cca-119d126ea0de [ 846.639332] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Expecting reply to msg 321c25d5a6e74709a0051a2dc89eb135 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 846.655197] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 321c25d5a6e74709a0051a2dc89eb135 [ 846.655917] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6a15e2d8-b294-4557-b55c-67bda67ca5c5 tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Lock "f98589dc-ea7a-44c8-8cca-119d126ea0de" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 232.312s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.656390] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-95318742-84af-4fb6-ad3f-3c5466ba567e tempest-TenantUsagesTestJSON-2045170971 tempest-TenantUsagesTestJSON-2045170971-project-member] Expecting reply to msg 0df04611d2fb4d5cb0b4b91d80f038cf in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 846.657606] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7e914500-d7f5-4574-9f02-720333161fba tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Lock "f98589dc-ea7a-44c8-8cca-119d126ea0de" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 34.279s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 846.657842] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7e914500-d7f5-4574-9f02-720333161fba tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Acquiring lock "f98589dc-ea7a-44c8-8cca-119d126ea0de-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 846.658065] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7e914500-d7f5-4574-9f02-720333161fba tempest-ServerAddressesTestJSON-2119483075 
tempest-ServerAddressesTestJSON-2119483075-project-member] Lock "f98589dc-ea7a-44c8-8cca-119d126ea0de-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 846.658265] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7e914500-d7f5-4574-9f02-720333161fba tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Lock "f98589dc-ea7a-44c8-8cca-119d126ea0de-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.661230] env[62740]: INFO nova.compute.manager [None req-7e914500-d7f5-4574-9f02-720333161fba tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Terminating instance [ 846.663598] env[62740]: DEBUG nova.compute.manager [None req-7e914500-d7f5-4574-9f02-720333161fba tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 846.663772] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7e914500-d7f5-4574-9f02-720333161fba tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 846.664029] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6209394a-fb80-4383-923e-09b1011009ea {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.667298] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0df04611d2fb4d5cb0b4b91d80f038cf [ 846.668040] env[62740]: DEBUG nova.compute.manager [None req-95318742-84af-4fb6-ad3f-3c5466ba567e tempest-TenantUsagesTestJSON-2045170971 tempest-TenantUsagesTestJSON-2045170971-project-member] [instance: 4ec9a397-1e4b-4767-b926-ccc6f63a951c] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 846.670138] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-95318742-84af-4fb6-ad3f-3c5466ba567e tempest-TenantUsagesTestJSON-2045170971 tempest-TenantUsagesTestJSON-2045170971-project-member] Expecting reply to msg 85d22662bbb14049bae894c61fb50ad5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 846.676487] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-857e07ea-6f51-47b3-b72b-104eecb6dbf7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.695570] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 85d22662bbb14049bae894c61fb50ad5 [ 846.696164] env[62740]: DEBUG nova.compute.manager [None req-95318742-84af-4fb6-ad3f-3c5466ba567e tempest-TenantUsagesTestJSON-2045170971 tempest-TenantUsagesTestJSON-2045170971-project-member] [instance: 4ec9a397-1e4b-4767-b926-ccc6f63a951c] Instance disappeared before build. 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 846.696501] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-95318742-84af-4fb6-ad3f-3c5466ba567e tempest-TenantUsagesTestJSON-2045170971 tempest-TenantUsagesTestJSON-2045170971-project-member] Expecting reply to msg eed6f1d8b0f04951a8eeb76017ff2710 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 846.706855] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-7e914500-d7f5-4574-9f02-720333161fba tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f98589dc-ea7a-44c8-8cca-119d126ea0de could not be found. [ 846.707119] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7e914500-d7f5-4574-9f02-720333161fba tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 846.707405] env[62740]: INFO nova.compute.manager [None req-7e914500-d7f5-4574-9f02-720333161fba tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Took 0.04 seconds to destroy the instance on the hypervisor. [ 846.707734] env[62740]: DEBUG oslo.service.loopingcall [None req-7e914500-d7f5-4574-9f02-720333161fba tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 846.708166] env[62740]: DEBUG nova.compute.manager [-] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 846.708325] env[62740]: DEBUG nova.network.neutron [-] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 846.719515] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eed6f1d8b0f04951a8eeb76017ff2710 [ 846.725478] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e799cf2813d24bfc9ee69cad3fae9bb3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 846.730997] env[62740]: DEBUG oslo_concurrency.lockutils [None req-95318742-84af-4fb6-ad3f-3c5466ba567e tempest-TenantUsagesTestJSON-2045170971 tempest-TenantUsagesTestJSON-2045170971-project-member] Lock "4ec9a397-1e4b-4767-b926-ccc6f63a951c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 209.507s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.731549] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-27895476-da6b-47f1-82c4-8733c058c222 tempest-MultipleCreateTestJSON-375945429 tempest-MultipleCreateTestJSON-375945429-project-member] Expecting reply to msg 4a17cbb12b1c4876b27d672ecfb77e85 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 846.733028] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e799cf2813d24bfc9ee69cad3fae9bb3 [ 846.733028] env[62740]: DEBUG nova.network.neutron [-] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.733582] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg a2512a4380724347881bcf3a37f76c6f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 846.741308] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4a17cbb12b1c4876b27d672ecfb77e85 [ 846.742140] env[62740]: DEBUG nova.compute.manager [None req-27895476-da6b-47f1-82c4-8733c058c222 tempest-MultipleCreateTestJSON-375945429 tempest-MultipleCreateTestJSON-375945429-project-member] [instance: e1c7a748-b3f3-41b7-8784-13699549a01d] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 846.743707] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-27895476-da6b-47f1-82c4-8733c058c222 tempest-MultipleCreateTestJSON-375945429 tempest-MultipleCreateTestJSON-375945429-project-member] Expecting reply to msg 8e22b4be68b2447f8cadb4a1aa25a495 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 846.746021] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a2512a4380724347881bcf3a37f76c6f [ 846.746021] env[62740]: INFO nova.compute.manager [-] [instance: f98589dc-ea7a-44c8-8cca-119d126ea0de] Took 0.04 seconds to deallocate network for instance. 
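The "Waiting for function ... _deallocate_network_with_retries to return" record above comes from oslo.service's looping-call helper, which re-invokes a function on an interval until it signals completion. A minimal, runnable sketch of that mechanism with a stand-in function (the interval and return value are illustrative, not Nova's):

```python
# Sketch of the oslo.service looping-call pattern behind the
# '_deallocate_network_with_retries' wait above. The function body
# is a stand-in, not Nova's retry logic.
from oslo_service import loopingcall

def _attempt():
    # Real code would retry on transient failures; raising
    # LoopingCallDone ends the loop and hands retvalue to the waiter.
    raise loopingcall.LoopingCallDone(retvalue=True)

timer = loopingcall.FixedIntervalLoopingCall(_attempt)
result = timer.start(interval=2).wait()  # blocks until the loop is done
print(result)  # True
```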
[ 846.748851] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7e914500-d7f5-4574-9f02-720333161fba tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Expecting reply to msg 94596978839142e092ffcd2b8652af99 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 846.766809] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e22b4be68b2447f8cadb4a1aa25a495 [ 846.768020] env[62740]: DEBUG nova.compute.manager [None req-27895476-da6b-47f1-82c4-8733c058c222 tempest-MultipleCreateTestJSON-375945429 tempest-MultipleCreateTestJSON-375945429-project-member] [instance: e1c7a748-b3f3-41b7-8784-13699549a01d] Instance disappeared before build. {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 846.768020] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-27895476-da6b-47f1-82c4-8733c058c222 tempest-MultipleCreateTestJSON-375945429 tempest-MultipleCreateTestJSON-375945429-project-member] Expecting reply to msg d0d11c94e1f34c3abf1047309938d6bd in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 846.777090] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 94596978839142e092ffcd2b8652af99 [ 846.777633] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d0d11c94e1f34c3abf1047309938d6bd [ 846.793988] env[62740]: DEBUG oslo_concurrency.lockutils [None req-27895476-da6b-47f1-82c4-8733c058c222 tempest-MultipleCreateTestJSON-375945429 tempest-MultipleCreateTestJSON-375945429-project-member] Lock "e1c7a748-b3f3-41b7-8784-13699549a01d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 207.934s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.794617] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-27895476-da6b-47f1-82c4-8733c058c222 tempest-MultipleCreateTestJSON-375945429 tempest-MultipleCreateTestJSON-375945429-project-member] Expecting reply to msg 809e7cbf10c74d0d870708324f9637ab in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 846.796972] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7e914500-d7f5-4574-9f02-720333161fba tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Expecting reply to msg 7b8e6e2367a64697ba7f454048d3f9b4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 846.805588] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 809e7cbf10c74d0d870708324f9637ab [ 846.805773] env[62740]: DEBUG nova.compute.manager [None req-27895476-da6b-47f1-82c4-8733c058c222 tempest-MultipleCreateTestJSON-375945429 tempest-MultipleCreateTestJSON-375945429-project-member] [instance: 25cef75e-2176-4999-965b-155cd7f8d137] Starting instance... 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 846.807568] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-27895476-da6b-47f1-82c4-8733c058c222 tempest-MultipleCreateTestJSON-375945429 tempest-MultipleCreateTestJSON-375945429-project-member] Expecting reply to msg 06ea3ab45210431b9a70ca467851fc4e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 846.838277] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7b8e6e2367a64697ba7f454048d3f9b4 [ 846.841121] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 06ea3ab45210431b9a70ca467851fc4e [ 846.842089] env[62740]: DEBUG nova.compute.manager [None req-27895476-da6b-47f1-82c4-8733c058c222 tempest-MultipleCreateTestJSON-375945429 tempest-MultipleCreateTestJSON-375945429-project-member] [instance: 25cef75e-2176-4999-965b-155cd7f8d137] Instance disappeared before build. {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 846.842635] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-27895476-da6b-47f1-82c4-8733c058c222 tempest-MultipleCreateTestJSON-375945429 tempest-MultipleCreateTestJSON-375945429-project-member] Expecting reply to msg 29a339f1c3ac4abea4772a26bc836687 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 846.843830] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7e914500-d7f5-4574-9f02-720333161fba tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Lock "f98589dc-ea7a-44c8-8cca-119d126ea0de" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.187s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.844297] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7e914500-d7f5-4574-9f02-720333161fba tempest-ServerAddressesTestJSON-2119483075 tempest-ServerAddressesTestJSON-2119483075-project-member] Expecting reply to msg a8463934094d41cc9743562801737df6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 846.861968] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 29a339f1c3ac4abea4772a26bc836687 [ 846.864330] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a8463934094d41cc9743562801737df6 [ 846.874135] env[62740]: DEBUG oslo_concurrency.lockutils [None req-27895476-da6b-47f1-82c4-8733c058c222 tempest-MultipleCreateTestJSON-375945429 tempest-MultipleCreateTestJSON-375945429-project-member] Lock "25cef75e-2176-4999-965b-155cd7f8d137" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 207.976s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.874721] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Expecting reply to msg 541484179386469faad73a4f4a2cc0d0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 846.884213] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 541484179386469faad73a4f4a2cc0d0 [ 846.884680] env[62740]: DEBUG nova.compute.manager [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 
tempest-ServerRescueNegativeTestJSON-438793456-project-member] [instance: 0e5caaed-20ff-40bd-b0cf-016ac18642cf] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 846.886541] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Expecting reply to msg 7b5fbcb096b9453589d92f99cdf1e0fe in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 846.922382] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7b5fbcb096b9453589d92f99cdf1e0fe [ 846.941019] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 846.941019] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 846.941019] env[62740]: INFO nova.compute.claims [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] [instance: 0e5caaed-20ff-40bd-b0cf-016ac18642cf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 846.942380] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Expecting reply to msg 5fd75fedd0a946ffb5887b2cafaaec8f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 846.977584] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5fd75fedd0a946ffb5887b2cafaaec8f [ 846.979481] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Expecting reply to msg 2038cd953bb84f9caa5e0f52ffb77133 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 846.986418] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2038cd953bb84f9caa5e0f52ffb77133 [ 847.357645] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ad72804-a17a-4595-a26c-9a907f7a9378 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.365166] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c2731e5-6d42-422a-9c45-1bba688c1ae5 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.399276] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e9dade3-075f-4584-9c57-215a7ebf4df4 {{(pid=62740) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.403448] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-43d2d1b4-a2bc-409b-ab48-2751e1143a1b tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Expecting reply to msg 9cce6a34c6614b3c8d348e37af3ad32d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 847.412875] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e14f6fe6-72e0-4283-ad82-095c73e4d361 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.417233] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9cce6a34c6614b3c8d348e37af3ad32d [ 847.418151] env[62740]: DEBUG oslo_concurrency.lockutils [None req-43d2d1b4-a2bc-409b-ab48-2751e1143a1b tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Acquiring lock "0e5caaed-20ff-40bd-b0cf-016ac18642cf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.429226] env[62740]: DEBUG nova.compute.provider_tree [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 847.429501] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Expecting reply to msg 6ddfcecc042949f18dda147753f45df2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 847.436437] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6ddfcecc042949f18dda147753f45df2 [ 847.437354] env[62740]: DEBUG nova.scheduler.client.report [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 847.439849] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Expecting reply to msg 55caa18b6aa0423f90040dd5aff02a8c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 847.453048] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 55caa18b6aa0423f90040dd5aff02a8c [ 847.453048] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 
tempest-ServerRescueNegativeTestJSON-438793456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.514s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 847.453048] env[62740]: DEBUG nova.compute.manager [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] [instance: 0e5caaed-20ff-40bd-b0cf-016ac18642cf] Start building networks asynchronously for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 847.454632] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Expecting reply to msg 2fac7096a6ac44ee8512ccbe310a7157 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 847.492030] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2fac7096a6ac44ee8512ccbe310a7157 [ 847.492710] env[62740]: DEBUG nova.compute.claims [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] [instance: 0e5caaed-20ff-40bd-b0cf-016ac18642cf] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 847.493763] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.493763] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 847.496642] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Expecting reply to msg a042dd9aba084ee59f832c802e42e282 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 847.529655] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a042dd9aba084ee59f832c802e42e282 [ 847.929495] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcbe7a24-c73f-4bed-8029-b162336251c5 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.937765] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adfac4ea-22bb-43cc-8ab5-e0ce3a49acf4 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.967978] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-474c5f46-b04b-4717-8ec4-afe81e76487f {{(pid=62740) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.975323] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54a41829-c5a9-4f35-8367-a1c9db8527a2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.989689] env[62740]: DEBUG nova.compute.provider_tree [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 847.990410] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Expecting reply to msg debd9d1d480c4156b303c977717922ad in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 848.008313] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg debd9d1d480c4156b303c977717922ad [ 848.009386] env[62740]: DEBUG nova.scheduler.client.report [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 848.011859] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Expecting reply to msg 881defe1fdf84b92959f6d86c9b96fc4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 848.023235] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 881defe1fdf84b92959f6d86c9b96fc4 [ 848.024115] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.531s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.024846] env[62740]: DEBUG nova.compute.utils [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] [instance: 0e5caaed-20ff-40bd-b0cf-016ac18642cf] Conflict updating instance 0e5caaed-20ff-40bd-b0cf-016ac18642cf. Expected: {'task_state': [None]}. 
Actual: {'task_state': 'deleting'} {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 848.026858] env[62740]: DEBUG nova.compute.manager [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] [instance: 0e5caaed-20ff-40bd-b0cf-016ac18642cf] Instance disappeared during build. {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2504}} [ 848.027040] env[62740]: DEBUG nova.compute.manager [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] [instance: 0e5caaed-20ff-40bd-b0cf-016ac18642cf] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 848.027262] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Acquiring lock "refresh_cache-0e5caaed-20ff-40bd-b0cf-016ac18642cf" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.027405] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Acquired lock "refresh_cache-0e5caaed-20ff-40bd-b0cf-016ac18642cf" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.027563] env[62740]: DEBUG nova.network.neutron [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] [instance: 0e5caaed-20ff-40bd-b0cf-016ac18642cf] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 848.028101] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Expecting reply to msg 40dd757d1ef54523b55043923721b3fb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 848.034656] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 40dd757d1ef54523b55043923721b3fb [ 848.053875] env[62740]: DEBUG nova.network.neutron [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] [instance: 0e5caaed-20ff-40bd-b0cf-016ac18642cf] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 848.401777] env[62740]: DEBUG nova.network.neutron [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] [instance: 0e5caaed-20ff-40bd-b0cf-016ac18642cf] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.402325] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Expecting reply to msg 7104d80d734547b6b1ed4b60c3e28ab2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 848.410923] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7104d80d734547b6b1ed4b60c3e28ab2 [ 848.411499] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Releasing lock "refresh_cache-0e5caaed-20ff-40bd-b0cf-016ac18642cf" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 848.411776] env[62740]: DEBUG nova.compute.manager [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 848.411906] env[62740]: DEBUG nova.compute.manager [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] [instance: 0e5caaed-20ff-40bd-b0cf-016ac18642cf] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 848.412118] env[62740]: DEBUG nova.network.neutron [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] [instance: 0e5caaed-20ff-40bd-b0cf-016ac18642cf] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 848.437228] env[62740]: DEBUG nova.network.neutron [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] [instance: 0e5caaed-20ff-40bd-b0cf-016ac18642cf] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 848.437733] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Expecting reply to msg 99ac81e3fbb549c1b90d371cffff05a1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 848.444493] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 99ac81e3fbb549c1b90d371cffff05a1 [ 848.446223] env[62740]: DEBUG nova.network.neutron [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] [instance: 0e5caaed-20ff-40bd-b0cf-016ac18642cf] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.446223] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Expecting reply to msg 52b0083c25654b93863ca11b50e74892 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 848.457475] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 52b0083c25654b93863ca11b50e74892 [ 848.458057] env[62740]: INFO nova.compute.manager [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] [instance: 0e5caaed-20ff-40bd-b0cf-016ac18642cf] Took 0.05 seconds to deallocate network for instance. [ 848.460087] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Expecting reply to msg 1b358bca2a5047a8b211c3235ab27fdd in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 848.505977] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1b358bca2a5047a8b211c3235ab27fdd [ 848.507435] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Expecting reply to msg e94eaa1d5d0f46a3a3d43cd8e846c5f5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 848.519667] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e94eaa1d5d0f46a3a3d43cd8e846c5f5 [ 848.551229] env[62740]: INFO nova.scheduler.client.report [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Deleted allocations for instance 0e5caaed-20ff-40bd-b0cf-016ac18642cf [ 848.551542] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6eb0abd9-e98c-40b9-802c-dc09abd95be5 tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Lock "0e5caaed-20ff-40bd-b0cf-016ac18642cf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 201.185s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.552223] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-4951cca0-017f-4011-8501-69ae0db3c99a 
tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Expecting reply to msg 96edb283ed184b9e90749d1934df77e8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 848.553046] env[62740]: DEBUG oslo_concurrency.lockutils [None req-43d2d1b4-a2bc-409b-ab48-2751e1143a1b tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Lock "0e5caaed-20ff-40bd-b0cf-016ac18642cf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 1.135s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.553323] env[62740]: DEBUG oslo_concurrency.lockutils [None req-43d2d1b4-a2bc-409b-ab48-2751e1143a1b tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Acquiring lock "0e5caaed-20ff-40bd-b0cf-016ac18642cf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.553554] env[62740]: DEBUG oslo_concurrency.lockutils [None req-43d2d1b4-a2bc-409b-ab48-2751e1143a1b tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Lock "0e5caaed-20ff-40bd-b0cf-016ac18642cf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.553766] env[62740]: DEBUG oslo_concurrency.lockutils [None req-43d2d1b4-a2bc-409b-ab48-2751e1143a1b tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Lock "0e5caaed-20ff-40bd-b0cf-016ac18642cf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.555565] env[62740]: INFO nova.compute.manager [None req-43d2d1b4-a2bc-409b-ab48-2751e1143a1b tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] [instance: 0e5caaed-20ff-40bd-b0cf-016ac18642cf] Terminating instance [ 848.557335] env[62740]: DEBUG oslo_concurrency.lockutils [None req-43d2d1b4-a2bc-409b-ab48-2751e1143a1b tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Acquiring lock "refresh_cache-0e5caaed-20ff-40bd-b0cf-016ac18642cf" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.557561] env[62740]: DEBUG oslo_concurrency.lockutils [None req-43d2d1b4-a2bc-409b-ab48-2751e1143a1b tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Acquired lock "refresh_cache-0e5caaed-20ff-40bd-b0cf-016ac18642cf" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.557746] env[62740]: DEBUG nova.network.neutron [None req-43d2d1b4-a2bc-409b-ab48-2751e1143a1b tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] [instance: 0e5caaed-20ff-40bd-b0cf-016ac18642cf] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} 
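The paired `Lock "..." acquired ... waited Ns` / `"released" ... held Ns` records throughout this section (the instance lock, the `-events` lock, the `refresh_cache-` lock) are emitted by oslo.concurrency's `synchronized` decorator. A sketch of that pattern with an illustrative lock name, not one taken from this log:

```python
# Sketch of the decorator behind the acquired/released records above.
# 'refresh-instance' is an illustrative lock name; lockutils logs how
# long the caller waited for the lock and how long it was held.
from oslo_concurrency import lockutils

@lockutils.synchronized('refresh-instance')
def rebuild_network_cache():
    # Work performed while holding the named lock; concurrent callers
    # of this function serialize here.
    pass

rebuild_network_cache()
```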
[ 848.558161] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-43d2d1b4-a2bc-409b-ab48-2751e1143a1b tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Expecting reply to msg 05c988112f8d44dc8235a93f4b015dec in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 848.566877] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 05c988112f8d44dc8235a93f4b015dec [ 848.571498] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 96edb283ed184b9e90749d1934df77e8 [ 848.571977] env[62740]: DEBUG nova.compute.manager [None req-4951cca0-017f-4011-8501-69ae0db3c99a tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] [instance: b9840eca-ec5f-4a8c-9bdf-1212e2640e5c] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 848.573705] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-4951cca0-017f-4011-8501-69ae0db3c99a tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Expecting reply to msg 9d854dd88f4442b7a9c63c622d9c51a9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 848.605036] env[62740]: DEBUG nova.network.neutron [None req-43d2d1b4-a2bc-409b-ab48-2751e1143a1b tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] [instance: 0e5caaed-20ff-40bd-b0cf-016ac18642cf] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 848.610730] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d854dd88f4442b7a9c63c622d9c51a9 [ 848.611349] env[62740]: DEBUG nova.compute.manager [None req-4951cca0-017f-4011-8501-69ae0db3c99a tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] [instance: b9840eca-ec5f-4a8c-9bdf-1212e2640e5c] Instance disappeared before build. 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 848.611694] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-4951cca0-017f-4011-8501-69ae0db3c99a tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Expecting reply to msg cb71a9a4d80c427ebb7cfb77ebaa152f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 848.628349] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cb71a9a4d80c427ebb7cfb77ebaa152f [ 848.641235] env[62740]: DEBUG oslo_concurrency.lockutils [None req-4951cca0-017f-4011-8501-69ae0db3c99a tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Lock "b9840eca-ec5f-4a8c-9bdf-1212e2640e5c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 198.501s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.642130] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2b9d9caf-5b29-49ed-8567-a5e8da717dad tempest-ServerActionsTestOtherA-296534188 tempest-ServerActionsTestOtherA-296534188-project-member] Expecting reply to msg 3b2cbe2a615247a0b57524477c35c870 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 848.654510] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3b2cbe2a615247a0b57524477c35c870 [ 848.655116] env[62740]: DEBUG nova.compute.manager [None req-2b9d9caf-5b29-49ed-8567-a5e8da717dad tempest-ServerActionsTestOtherA-296534188 tempest-ServerActionsTestOtherA-296534188-project-member] [instance: ec703551-5c8d-43bb-b727-709aeeeac9a4] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 848.657077] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2b9d9caf-5b29-49ed-8567-a5e8da717dad tempest-ServerActionsTestOtherA-296534188 tempest-ServerActionsTestOtherA-296534188-project-member] Expecting reply to msg 1b4d8df71ed74eedadccd2c18a103096 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 848.679955] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1b4d8df71ed74eedadccd2c18a103096 [ 848.680566] env[62740]: DEBUG nova.compute.manager [None req-2b9d9caf-5b29-49ed-8567-a5e8da717dad tempest-ServerActionsTestOtherA-296534188 tempest-ServerActionsTestOtherA-296534188-project-member] [instance: ec703551-5c8d-43bb-b727-709aeeeac9a4] Instance disappeared before build. 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 848.680911] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2b9d9caf-5b29-49ed-8567-a5e8da717dad tempest-ServerActionsTestOtherA-296534188 tempest-ServerActionsTestOtherA-296534188-project-member] Expecting reply to msg 58a7b3be4e9f441f80674da913ec2cb8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 848.691938] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 58a7b3be4e9f441f80674da913ec2cb8 [ 848.703862] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2b9d9caf-5b29-49ed-8567-a5e8da717dad tempest-ServerActionsTestOtherA-296534188 tempest-ServerActionsTestOtherA-296534188-project-member] Lock "ec703551-5c8d-43bb-b727-709aeeeac9a4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 197.212s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.704443] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Expecting reply to msg 16bfacc27d14492fa2bd98aed6fbefa7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 848.714067] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 16bfacc27d14492fa2bd98aed6fbefa7 [ 848.714974] env[62740]: DEBUG nova.compute.manager [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 848.720020] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Expecting reply to msg 814ac65d9dee4bd396a94218861fbe69 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 848.752947] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 814ac65d9dee4bd396a94218861fbe69 [ 848.771366] env[62740]: DEBUG oslo_concurrency.lockutils [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.771671] env[62740]: DEBUG oslo_concurrency.lockutils [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.773811] env[62740]: INFO nova.compute.claims [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 848.775747] env[62740]: INFO 
oslo_messaging._drivers.amqpdriver [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Expecting reply to msg 2b79ea2474224d188d5e4b8d37e5917d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 848.820108] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b79ea2474224d188d5e4b8d37e5917d [ 848.820108] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Expecting reply to msg 34c5f7469a2e4cf99a8dba618f420fb5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 848.828742] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 34c5f7469a2e4cf99a8dba618f420fb5 [ 848.873212] env[62740]: DEBUG nova.network.neutron [None req-43d2d1b4-a2bc-409b-ab48-2751e1143a1b tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] [instance: 0e5caaed-20ff-40bd-b0cf-016ac18642cf] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.873671] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-43d2d1b4-a2bc-409b-ab48-2751e1143a1b tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Expecting reply to msg 0559b9fa788b4e3db51ebb87bc1e71af in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 848.887228] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0559b9fa788b4e3db51ebb87bc1e71af [ 848.887924] env[62740]: DEBUG oslo_concurrency.lockutils [None req-43d2d1b4-a2bc-409b-ab48-2751e1143a1b tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Releasing lock "refresh_cache-0e5caaed-20ff-40bd-b0cf-016ac18642cf" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 848.888371] env[62740]: DEBUG nova.compute.manager [None req-43d2d1b4-a2bc-409b-ab48-2751e1143a1b tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] [instance: 0e5caaed-20ff-40bd-b0cf-016ac18642cf] Start destroying the instance on the hypervisor. 
{{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 848.890493] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-43d2d1b4-a2bc-409b-ab48-2751e1143a1b tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] [instance: 0e5caaed-20ff-40bd-b0cf-016ac18642cf] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 848.890493] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-10b14a89-09b1-4056-ac36-22721d5464fb {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.905561] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6efcce68-1557-4864-8ce8-81f0d24d32c3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.939702] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-43d2d1b4-a2bc-409b-ab48-2751e1143a1b tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] [instance: 0e5caaed-20ff-40bd-b0cf-016ac18642cf] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0e5caaed-20ff-40bd-b0cf-016ac18642cf could not be found. [ 848.939899] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-43d2d1b4-a2bc-409b-ab48-2751e1143a1b tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] [instance: 0e5caaed-20ff-40bd-b0cf-016ac18642cf] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 848.940106] env[62740]: INFO nova.compute.manager [None req-43d2d1b4-a2bc-409b-ab48-2751e1143a1b tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] [instance: 0e5caaed-20ff-40bd-b0cf-016ac18642cf] Took 0.05 seconds to destroy the instance on the hypervisor. [ 848.940343] env[62740]: DEBUG oslo.service.loopingcall [None req-43d2d1b4-a2bc-409b-ab48-2751e1143a1b tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 848.943359] env[62740]: DEBUG nova.compute.manager [-] [instance: 0e5caaed-20ff-40bd-b0cf-016ac18642cf] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 848.943464] env[62740]: DEBUG nova.network.neutron [-] [instance: 0e5caaed-20ff-40bd-b0cf-016ac18642cf] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 848.978351] env[62740]: DEBUG nova.network.neutron [-] [instance: 0e5caaed-20ff-40bd-b0cf-016ac18642cf] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 848.978910] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 67624fefc07d452897c61b815af9cf4b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 848.987634] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 67624fefc07d452897c61b815af9cf4b [ 848.987879] env[62740]: DEBUG nova.network.neutron [-] [instance: 0e5caaed-20ff-40bd-b0cf-016ac18642cf] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.988358] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c4715242467a4ec8b58536d1084ceb49 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 848.999022] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c4715242467a4ec8b58536d1084ceb49 [ 848.999601] env[62740]: INFO nova.compute.manager [-] [instance: 0e5caaed-20ff-40bd-b0cf-016ac18642cf] Took 0.06 seconds to deallocate network for instance. [ 849.004692] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-43d2d1b4-a2bc-409b-ab48-2751e1143a1b tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Expecting reply to msg 51e5da4fd40d49f498223edbcda3b576 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 849.037292] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 51e5da4fd40d49f498223edbcda3b576 [ 849.056168] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-43d2d1b4-a2bc-409b-ab48-2751e1143a1b tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Expecting reply to msg 7401e88cf890407ebf6298e337809d8f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 849.103249] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7401e88cf890407ebf6298e337809d8f [ 849.106276] env[62740]: DEBUG oslo_concurrency.lockutils [None req-43d2d1b4-a2bc-409b-ab48-2751e1143a1b tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Lock "0e5caaed-20ff-40bd-b0cf-016ac18642cf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.553s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.106539] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-43d2d1b4-a2bc-409b-ab48-2751e1143a1b tempest-ServerRescueNegativeTestJSON-438793456 tempest-ServerRescueNegativeTestJSON-438793456-project-member] Expecting reply to msg d829ccefb60c480fae4a3b77692c4f03 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 849.126671] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d829ccefb60c480fae4a3b77692c4f03 [ 849.289103] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abac4903-12c0-4f86-bb95-d6a3c6387680 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.297243] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbf49989-45c2-4e94-9d41-cfea4f8f7697 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.327916] env[62740]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c83b77f-4608-452d-859c-ef90fb166894 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.335408] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10dc33e0-29a2-4608-b84f-72984e26933c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.349749] env[62740]: DEBUG nova.compute.provider_tree [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 849.350265] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Expecting reply to msg b06e612b099244ca8dd499404b06a9ef in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 849.359506] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b06e612b099244ca8dd499404b06a9ef [ 849.360490] env[62740]: DEBUG nova.scheduler.client.report [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 849.364049] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Expecting reply to msg 52b5233576cf43deadfd7b7fca04c0c5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 849.378477] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 52b5233576cf43deadfd7b7fca04c0c5 [ 849.379227] env[62740]: DEBUG oslo_concurrency.lockutils [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.608s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.379745] env[62740]: DEBUG nova.compute.manager [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Start building networks asynchronously for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 849.381459] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Expecting reply to msg 792dde61170c432981dc4028c3949cbc in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 849.415596] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 792dde61170c432981dc4028c3949cbc [ 849.417546] env[62740]: DEBUG nova.compute.utils [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 849.418341] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Expecting reply to msg 9ea6359f79204d7ba5da89b48e6402b2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 849.419307] env[62740]: DEBUG nova.compute.manager [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 849.419542] env[62740]: DEBUG nova.network.neutron [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 849.428787] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ea6359f79204d7ba5da89b48e6402b2 [ 849.429367] env[62740]: DEBUG nova.compute.manager [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 849.431022] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Expecting reply to msg 5dae7422e6f44622903f59a6e9f59fda in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 849.459740] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5dae7422e6f44622903f59a6e9f59fda [ 849.462428] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Expecting reply to msg 069e6d09bfb043d896226b5bf1ad2779 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 849.499423] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 069e6d09bfb043d896226b5bf1ad2779 [ 849.500751] env[62740]: DEBUG nova.compute.manager [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Start spawning the instance on the hypervisor. {{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 849.534203] env[62740]: DEBUG nova.virt.hardware [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 849.534511] env[62740]: DEBUG nova.virt.hardware [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 849.534666] env[62740]: DEBUG nova.virt.hardware [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 849.534858] env[62740]: DEBUG nova.virt.hardware [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 849.535015] env[62740]: DEBUG nova.virt.hardware [None req-09317351-a109-4643-a772-41846a599304 
tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 849.535265] env[62740]: DEBUG nova.virt.hardware [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 849.535530] env[62740]: DEBUG nova.virt.hardware [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 849.535744] env[62740]: DEBUG nova.virt.hardware [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 849.535955] env[62740]: DEBUG nova.virt.hardware [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 849.536144] env[62740]: DEBUG nova.virt.hardware [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 849.536361] env[62740]: DEBUG nova.virt.hardware [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 849.537542] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b062142-8d1e-4bc0-9eaf-64cf3f2f7c05 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.546605] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c1dacd1-6f19-4bf8-9211-bd6b1816b0c2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.569187] env[62740]: DEBUG nova.policy [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '54ad12920f2546ddb32d5ae21f214ced', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8a1c64af2425450c83bbecfd1e409fb4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 
'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 850.880659] env[62740]: DEBUG nova.network.neutron [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Successfully created port: f032ce1b-97ff-48ef-b917-d083c9a4aba8 {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 853.383375] env[62740]: DEBUG nova.network.neutron [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Successfully updated port: f032ce1b-97ff-48ef-b917-d083c9a4aba8 {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 853.383874] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Expecting reply to msg a4ebaf331f22461ab30701679fce5c3f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 853.397176] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a4ebaf331f22461ab30701679fce5c3f [ 853.397506] env[62740]: DEBUG oslo_concurrency.lockutils [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Acquiring lock "refresh_cache-00085df9-ce61-4ccc-8ecf-16956109eb8f" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.397631] env[62740]: DEBUG oslo_concurrency.lockutils [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Acquired lock "refresh_cache-00085df9-ce61-4ccc-8ecf-16956109eb8f" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.397775] env[62740]: DEBUG nova.network.neutron [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 853.398170] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Expecting reply to msg 7ba268c8f11d49a3b5dd7dcfce3ad6d6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 853.405820] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ba268c8f11d49a3b5dd7dcfce3ad6d6 [ 853.466909] env[62740]: DEBUG nova.network.neutron [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 853.851916] env[62740]: DEBUG nova.network.neutron [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Updating instance_info_cache with network_info: [{"id": "f032ce1b-97ff-48ef-b917-d083c9a4aba8", "address": "fa:16:3e:48:ca:3f", "network": {"id": "cb0454a2-bda6-49dd-b924-abd26c6ea5d4", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-278992897-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a1c64af2425450c83bbecfd1e409fb4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "191a5351-07d5-4138-b855-206f48fc4375", "external-id": "nsx-vlan-transportzone-939", "segmentation_id": 939, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf032ce1b-97", "ovs_interfaceid": "f032ce1b-97ff-48ef-b917-d083c9a4aba8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.852483] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Expecting reply to msg 089eb3c1daf74c2189ff206a00656599 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 853.868191] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 089eb3c1daf74c2189ff206a00656599 [ 853.868874] env[62740]: DEBUG oslo_concurrency.lockutils [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Releasing lock "refresh_cache-00085df9-ce61-4ccc-8ecf-16956109eb8f" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 853.872096] env[62740]: DEBUG nova.compute.manager [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Instance network_info: |[{"id": "f032ce1b-97ff-48ef-b917-d083c9a4aba8", "address": "fa:16:3e:48:ca:3f", "network": {"id": "cb0454a2-bda6-49dd-b924-abd26c6ea5d4", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-278992897-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a1c64af2425450c83bbecfd1e409fb4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": 
"l2", "port_filter": true, "nsx-logical-switch-id": "191a5351-07d5-4138-b855-206f48fc4375", "external-id": "nsx-vlan-transportzone-939", "segmentation_id": 939, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf032ce1b-97", "ovs_interfaceid": "f032ce1b-97ff-48ef-b917-d083c9a4aba8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 853.872508] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:48:ca:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '191a5351-07d5-4138-b855-206f48fc4375', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f032ce1b-97ff-48ef-b917-d083c9a4aba8', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 853.877552] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Creating folder: Project (8a1c64af2425450c83bbecfd1e409fb4). Parent ref: group-v156037. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 853.879021] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-141e9c0b-5a6c-495e-a95d-1da313de22ee {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.889412] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Created folder: Project (8a1c64af2425450c83bbecfd1e409fb4) in parent group-v156037. [ 853.889819] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Creating folder: Instances. Parent ref: group-v156094. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 853.889819] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c20d7a70-9e57-4707-9d1b-a8b368f887e3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.899966] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Created folder: Instances in parent group-v156094. [ 853.900211] env[62740]: DEBUG oslo.service.loopingcall [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 853.903022] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 853.903022] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3737ecd0-c61d-4732-ad61-bda47b4d314d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.926581] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 853.926581] env[62740]: value = "task-640127" [ 853.926581] env[62740]: _type = "Task" [ 853.926581] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.939662] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640127, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.110994] env[62740]: DEBUG nova.compute.manager [req-106c3081-5ce8-4767-b34d-4c5fb4a95586 req-8037a068-490b-48ae-808a-099b33864b6a service nova] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Received event network-vif-plugged-f032ce1b-97ff-48ef-b917-d083c9a4aba8 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 854.111296] env[62740]: DEBUG oslo_concurrency.lockutils [req-106c3081-5ce8-4767-b34d-4c5fb4a95586 req-8037a068-490b-48ae-808a-099b33864b6a service nova] Acquiring lock "00085df9-ce61-4ccc-8ecf-16956109eb8f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.111540] env[62740]: DEBUG oslo_concurrency.lockutils [req-106c3081-5ce8-4767-b34d-4c5fb4a95586 req-8037a068-490b-48ae-808a-099b33864b6a service nova] Lock "00085df9-ce61-4ccc-8ecf-16956109eb8f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.111729] env[62740]: DEBUG oslo_concurrency.lockutils [req-106c3081-5ce8-4767-b34d-4c5fb4a95586 req-8037a068-490b-48ae-808a-099b33864b6a service nova] Lock "00085df9-ce61-4ccc-8ecf-16956109eb8f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.111910] env[62740]: DEBUG nova.compute.manager [req-106c3081-5ce8-4767-b34d-4c5fb4a95586 req-8037a068-490b-48ae-808a-099b33864b6a service nova] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] No waiting events found dispatching network-vif-plugged-f032ce1b-97ff-48ef-b917-d083c9a4aba8 {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 854.112622] env[62740]: WARNING nova.compute.manager [req-106c3081-5ce8-4767-b34d-4c5fb4a95586 req-8037a068-490b-48ae-808a-099b33864b6a service nova] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Received unexpected event network-vif-plugged-f032ce1b-97ff-48ef-b917-d083c9a4aba8 for instance with vm_state building and task_state spawning. [ 854.437064] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640127, 'name': CreateVM_Task, 'duration_secs': 0.296865} completed successfully. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.437572] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 854.437787] env[62740]: DEBUG oslo_concurrency.lockutils [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 854.437963] env[62740]: DEBUG oslo_concurrency.lockutils [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Acquired lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.438325] env[62740]: DEBUG oslo_concurrency.lockutils [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 854.438926] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63984107-5f0a-4891-9c42-200a9c9e7727 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.444449] env[62740]: DEBUG oslo_vmware.api [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Waiting for the task: (returnval){ [ 854.444449] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52ac305a-4855-a766-7809-7440e36398d6" [ 854.444449] env[62740]: _type = "Task" [ 854.444449] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.447496] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0c4703f9-4bb7-46dd-9e31-43d161839b80 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Expecting reply to msg 54ac8922a2084646bf04ee3300793fed in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 854.459147] env[62740]: DEBUG oslo_vmware.api [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52ac305a-4855-a766-7809-7440e36398d6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.460038] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 54ac8922a2084646bf04ee3300793fed [ 854.460409] env[62740]: DEBUG oslo_concurrency.lockutils [None req-0c4703f9-4bb7-46dd-9e31-43d161839b80 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Acquiring lock "00085df9-ce61-4ccc-8ecf-16956109eb8f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.956826] env[62740]: DEBUG oslo_concurrency.lockutils [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Releasing lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 854.957110] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 854.957355] env[62740]: DEBUG oslo_concurrency.lockutils [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 856.278958] env[62740]: DEBUG nova.compute.manager [req-9433d799-9d00-404c-87df-9519f95f4ed7 req-1cd02e89-9a40-4f69-95ef-4e17095018ad service nova] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Received event network-changed-f032ce1b-97ff-48ef-b917-d083c9a4aba8 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 856.279272] env[62740]: DEBUG nova.compute.manager [req-9433d799-9d00-404c-87df-9519f95f4ed7 req-1cd02e89-9a40-4f69-95ef-4e17095018ad service nova] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Refreshing instance network info cache due to event network-changed-f032ce1b-97ff-48ef-b917-d083c9a4aba8. 
{{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 856.279343] env[62740]: DEBUG oslo_concurrency.lockutils [req-9433d799-9d00-404c-87df-9519f95f4ed7 req-1cd02e89-9a40-4f69-95ef-4e17095018ad service nova] Acquiring lock "refresh_cache-00085df9-ce61-4ccc-8ecf-16956109eb8f" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 856.279492] env[62740]: DEBUG oslo_concurrency.lockutils [req-9433d799-9d00-404c-87df-9519f95f4ed7 req-1cd02e89-9a40-4f69-95ef-4e17095018ad service nova] Acquired lock "refresh_cache-00085df9-ce61-4ccc-8ecf-16956109eb8f" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.279652] env[62740]: DEBUG nova.network.neutron [req-9433d799-9d00-404c-87df-9519f95f4ed7 req-1cd02e89-9a40-4f69-95ef-4e17095018ad service nova] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Refreshing network info cache for port f032ce1b-97ff-48ef-b917-d083c9a4aba8 {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 856.280270] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-9433d799-9d00-404c-87df-9519f95f4ed7 req-1cd02e89-9a40-4f69-95ef-4e17095018ad service nova] Expecting reply to msg 67d6f54aaa3649618a3d701e9ace2b5c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 856.288467] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 67d6f54aaa3649618a3d701e9ace2b5c [ 857.015582] env[62740]: DEBUG nova.network.neutron [req-9433d799-9d00-404c-87df-9519f95f4ed7 req-1cd02e89-9a40-4f69-95ef-4e17095018ad service nova] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Updated VIF entry in instance network info cache for port f032ce1b-97ff-48ef-b917-d083c9a4aba8. 
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 857.015952] env[62740]: DEBUG nova.network.neutron [req-9433d799-9d00-404c-87df-9519f95f4ed7 req-1cd02e89-9a40-4f69-95ef-4e17095018ad service nova] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Updating instance_info_cache with network_info: [{"id": "f032ce1b-97ff-48ef-b917-d083c9a4aba8", "address": "fa:16:3e:48:ca:3f", "network": {"id": "cb0454a2-bda6-49dd-b924-abd26c6ea5d4", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-278992897-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a1c64af2425450c83bbecfd1e409fb4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "191a5351-07d5-4138-b855-206f48fc4375", "external-id": "nsx-vlan-transportzone-939", "segmentation_id": 939, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf032ce1b-97", "ovs_interfaceid": "f032ce1b-97ff-48ef-b917-d083c9a4aba8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.016533] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-9433d799-9d00-404c-87df-9519f95f4ed7 req-1cd02e89-9a40-4f69-95ef-4e17095018ad service nova] Expecting reply to msg 6060fb274d404299ba306e6f57d73836 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 857.025931] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6060fb274d404299ba306e6f57d73836 [ 857.026646] env[62740]: DEBUG oslo_concurrency.lockutils [req-9433d799-9d00-404c-87df-9519f95f4ed7 req-1cd02e89-9a40-4f69-95ef-4e17095018ad service nova] Releasing lock "refresh_cache-00085df9-ce61-4ccc-8ecf-16956109eb8f" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 859.198392] env[62740]: DEBUG oslo_concurrency.lockutils [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Acquiring lock "d6c3ca16-5c7c-41e6-9850-10221603ad2a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 859.198671] env[62740]: DEBUG oslo_concurrency.lockutils [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Lock "d6c3ca16-5c7c-41e6-9850-10221603ad2a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.677468] env[62740]: DEBUG oslo_concurrency.lockutils [None req-bc639afe-9daf-45b2-bcef-b70a1ea9afae tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Acquiring lock "d60c8e65-1eb3-4017-b28e-8b72b0b4b2e1" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.677759] env[62740]: DEBUG oslo_concurrency.lockutils [None req-bc639afe-9daf-45b2-bcef-b70a1ea9afae tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Lock "d60c8e65-1eb3-4017-b28e-8b72b0b4b2e1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 867.334727] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1aa24f57-b11a-40f0-91cd-4be53c042c2b tempest-FloatingIPsAssociationNegativeTestJSON-1331363627 tempest-FloatingIPsAssociationNegativeTestJSON-1331363627-project-member] Acquiring lock "6531eee8-d8ec-4a9d-911c-d7d9b88baf19" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 867.335055] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1aa24f57-b11a-40f0-91cd-4be53c042c2b tempest-FloatingIPsAssociationNegativeTestJSON-1331363627 tempest-FloatingIPsAssociationNegativeTestJSON-1331363627-project-member] Lock "6531eee8-d8ec-4a9d-911c-d7d9b88baf19" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.701767] env[62740]: WARNING oslo_vmware.rw_handles [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 878.701767] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 878.701767] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 878.701767] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 878.701767] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 878.701767] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 878.701767] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 878.701767] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 878.701767] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 878.701767] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 878.701767] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 878.701767] env[62740]: ERROR oslo_vmware.rw_handles [ 878.702648] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to 
vmware_temp/04c33d33-a6b6-483c-b3c9-fe0bfd28c513/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 878.704782] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 878.704782] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Copying Virtual Disk [datastore2] vmware_temp/04c33d33-a6b6-483c-b3c9-fe0bfd28c513/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore2] vmware_temp/04c33d33-a6b6-483c-b3c9-fe0bfd28c513/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 878.704968] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1cb11024-7b71-4d85-a3fb-eda59b4f0ba7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.713619] env[62740]: DEBUG oslo_vmware.api [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Waiting for the task: (returnval){ [ 878.713619] env[62740]: value = "task-640128" [ 878.713619] env[62740]: _type = "Task" [ 878.713619] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.721423] env[62740]: DEBUG oslo_vmware.api [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Task: {'id': task-640128, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.226922] env[62740]: DEBUG oslo_vmware.exceptions [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Fault InvalidArgument not matched. 
{{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 879.227244] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 879.227864] env[62740]: ERROR nova.compute.manager [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 879.227864] env[62740]: Faults: ['InvalidArgument'] [ 879.227864] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Traceback (most recent call last): [ 879.227864] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 879.227864] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] yield resources [ 879.227864] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 879.227864] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] self.driver.spawn(context, instance, image_meta, [ 879.227864] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 879.227864] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 879.227864] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 879.227864] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] self._fetch_image_if_missing(context, vi) [ 879.227864] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 879.228337] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] image_cache(vi, tmp_image_ds_loc) [ 879.228337] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 879.228337] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] vm_util.copy_virtual_disk( [ 879.228337] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 879.228337] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] session._wait_for_task(vmdk_copy_task) [ 879.228337] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 879.228337] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] return self.wait_for_task(task_ref) [ 879.228337] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 879.228337] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] return evt.wait() [ 879.228337] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 879.228337] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] result = hub.switch() [ 879.228337] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 879.228337] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] return self.greenlet.switch() [ 879.228934] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 879.228934] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] self.f(*self.args, **self.kw) [ 879.228934] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 879.228934] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] raise exceptions.translate_fault(task_info.error) [ 879.228934] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 879.228934] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Faults: ['InvalidArgument'] [ 879.228934] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] [ 879.228934] env[62740]: INFO nova.compute.manager [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Terminating instance [ 879.229870] env[62740]: DEBUG oslo_concurrency.lockutils [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.230092] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 879.230743] env[62740]: DEBUG nova.compute.manager [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 
3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 879.230932] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 879.231176] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-00b4cd3a-a4ba-4fc7-b7d0-b4f1551930bb {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.234983] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9337067d-c654-4a45-809f-ba7586f85599 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.242201] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 879.242472] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e8e24b50-1e39-4515-b70c-b2781752233f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.244915] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 879.245162] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 879.246213] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-522a30af-e9e9-45a0-84b2-6f4f1afc3741 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.251399] env[62740]: DEBUG oslo_vmware.api [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Waiting for the task: (returnval){ [ 879.251399] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52e433f0-fa21-d6ae-f214-6a728c0066a8" [ 879.251399] env[62740]: _type = "Task" [ 879.251399] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.259760] env[62740]: DEBUG oslo_vmware.api [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52e433f0-fa21-d6ae-f214-6a728c0066a8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.312849] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 879.313106] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 879.313301] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Deleting the datastore file [datastore2] 3102cc87-df1a-4de8-bfdb-9b904f40ea2e {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 879.313557] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fdaa2805-f789-48f2-9eda-67f42f05324b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.321349] env[62740]: DEBUG oslo_vmware.api [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Waiting for the task: (returnval){ [ 879.321349] env[62740]: value = "task-640130" [ 879.321349] env[62740]: _type = "Task" [ 879.321349] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.329060] env[62740]: DEBUG oslo_vmware.api [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Task: {'id': task-640130, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.763978] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 879.764346] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Creating directory with path [datastore2] vmware_temp/0a19e8b9-0bfb-4d8c-86b9-780d521c9f1a/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 879.764518] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9ad12b7f-ea52-4c9d-a893-b0f38b9c2255 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.777358] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Created directory with path [datastore2] vmware_temp/0a19e8b9-0bfb-4d8c-86b9-780d521c9f1a/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 879.778208] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Fetch image to [datastore2] vmware_temp/0a19e8b9-0bfb-4d8c-86b9-780d521c9f1a/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 879.778493] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/0a19e8b9-0bfb-4d8c-86b9-780d521c9f1a/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 879.779365] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f128869b-e293-4503-8149-7711496245c0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.786748] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fe3d7d6-711b-4593-b94c-a6f353a9e5c9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.796913] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a78d6a05-792b-402a-8e06-612f6f492620 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.834024] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-705e012a-6067-4bc3-a90c-82970dfdc50f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.840824] env[62740]: DEBUG oslo_vmware.api [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Task: {'id': task-640130, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.097262} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.842444] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 879.842637] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 879.842871] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 879.842978] env[62740]: INFO nova.compute.manager [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 879.844880] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c130383a-8a38-4f37-9b27-4d509b083bc9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.847855] env[62740]: DEBUG nova.compute.claims [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 879.848030] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.848287] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.850265] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Expecting reply to msg 663e85f226fd46759ff99d17dba893e9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 879.871591] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 879.895152] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 663e85f226fd46759ff99d17dba893e9 [ 879.979949] env[62740]: DEBUG oslo_vmware.rw_handles [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0a19e8b9-0bfb-4d8c-86b9-780d521c9f1a/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 880.043351] env[62740]: DEBUG oslo_vmware.rw_handles [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Completed reading data from the image iterator. 
{{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 880.043351] env[62740]: DEBUG oslo_vmware.rw_handles [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0a19e8b9-0bfb-4d8c-86b9-780d521c9f1a/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 880.295510] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4712f9b7-98ee-441a-8fe0-5c483943c1b3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.303305] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32a59d4f-8d21-4229-83eb-22aae0ccc912 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.334059] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d2c8d11-d192-408c-b5af-9ed35652ddcb {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.340593] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f1ee651-10f0-4f6d-b7fd-bca9b6ed0824 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.353574] env[62740]: DEBUG nova.compute.provider_tree [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 880.354095] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Expecting reply to msg 0674d4dc204447c6b7ce30707ea8e025 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 880.361444] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0674d4dc204447c6b7ce30707ea8e025 [ 880.362397] env[62740]: DEBUG nova.scheduler.client.report [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 880.364847] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Expecting reply to msg 
c6c9220b0faa4daa909547ccd1fa6db4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 880.377872] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c6c9220b0faa4daa909547ccd1fa6db4 [ 880.378681] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.530s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.379208] env[62740]: ERROR nova.compute.manager [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 880.379208] env[62740]: Faults: ['InvalidArgument'] [ 880.379208] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Traceback (most recent call last): [ 880.379208] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 880.379208] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] self.driver.spawn(context, instance, image_meta, [ 880.379208] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 880.379208] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 880.379208] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 880.379208] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] self._fetch_image_if_missing(context, vi) [ 880.379208] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 880.379208] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] image_cache(vi, tmp_image_ds_loc) [ 880.379208] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 880.379646] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] vm_util.copy_virtual_disk( [ 880.379646] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 880.379646] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] session._wait_for_task(vmdk_copy_task) [ 880.379646] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 880.379646] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] return self.wait_for_task(task_ref) [ 880.379646] env[62740]: ERROR 
nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 880.379646] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] return evt.wait() [ 880.379646] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 880.379646] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] result = hub.switch() [ 880.379646] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 880.379646] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] return self.greenlet.switch() [ 880.379646] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 880.379646] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] self.f(*self.args, **self.kw) [ 880.380088] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 880.380088] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] raise exceptions.translate_fault(task_info.error) [ 880.380088] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 880.380088] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Faults: ['InvalidArgument'] [ 880.380088] env[62740]: ERROR nova.compute.manager [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] [ 880.380088] env[62740]: DEBUG nova.compute.utils [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 880.381383] env[62740]: DEBUG nova.compute.manager [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Build of instance 3102cc87-df1a-4de8-bfdb-9b904f40ea2e was re-scheduled: A specified parameter was not correct: fileType [ 880.381383] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 880.381754] env[62740]: DEBUG nova.compute.manager [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 880.381927] env[62740]: DEBUG nova.compute.manager [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Virt driver does not provide unplug_vifs method, so it is not 
possible determine if VIFs should be unplugged. {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 880.382108] env[62740]: DEBUG nova.compute.manager [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 880.382273] env[62740]: DEBUG nova.network.neutron [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 880.743954] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Expecting reply to msg 6326b04d2df645acb0f68ef0d003ef4d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 880.755469] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6326b04d2df645acb0f68ef0d003ef4d [ 880.756060] env[62740]: DEBUG nova.network.neutron [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.756540] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Expecting reply to msg 4acd73bfbd6342e19e071900161f0f37 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 880.767034] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4acd73bfbd6342e19e071900161f0f37 [ 880.767638] env[62740]: INFO nova.compute.manager [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Took 0.39 seconds to deallocate network for instance. 
[ 880.769823] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Expecting reply to msg 87a25c18311a4721a1efab5e1c7f37b0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 880.814489] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 87a25c18311a4721a1efab5e1c7f37b0 [ 880.817387] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Expecting reply to msg 6853856103ea40c6afb3f347a0394c87 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 880.856104] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6853856103ea40c6afb3f347a0394c87 [ 880.884468] env[62740]: INFO nova.scheduler.client.report [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Deleted allocations for instance 3102cc87-df1a-4de8-bfdb-9b904f40ea2e [ 880.891838] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Expecting reply to msg 0f4d36c7fe7448bcad8b6d50f86de98c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 880.909518] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0f4d36c7fe7448bcad8b6d50f86de98c [ 880.910156] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5e2a901f-68c8-483d-a2c9-a6dd28e6bc20 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Lock "3102cc87-df1a-4de8-bfdb-9b904f40ea2e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 286.430s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.910731] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5796bde4-492e-4f25-ac8c-fcab9c3edbaf tempest-ServerDiagnosticsV248Test-1188391784 tempest-ServerDiagnosticsV248Test-1188391784-project-member] Expecting reply to msg e8db9192ae8c486487572fd303d414fc in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 880.911512] env[62740]: DEBUG oslo_concurrency.lockutils [None req-42013f34-c1d2-45cb-b492-4034d9971d86 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Lock "3102cc87-df1a-4de8-bfdb-9b904f40ea2e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 84.612s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 880.911730] env[62740]: DEBUG oslo_concurrency.lockutils [None req-42013f34-c1d2-45cb-b492-4034d9971d86 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Acquiring lock "3102cc87-df1a-4de8-bfdb-9b904f40ea2e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 880.911932] env[62740]: DEBUG oslo_concurrency.lockutils [None req-42013f34-c1d2-45cb-b492-4034d9971d86 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Lock 
"3102cc87-df1a-4de8-bfdb-9b904f40ea2e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 880.912110] env[62740]: DEBUG oslo_concurrency.lockutils [None req-42013f34-c1d2-45cb-b492-4034d9971d86 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Lock "3102cc87-df1a-4de8-bfdb-9b904f40ea2e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.914340] env[62740]: INFO nova.compute.manager [None req-42013f34-c1d2-45cb-b492-4034d9971d86 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Terminating instance [ 880.916065] env[62740]: DEBUG nova.compute.manager [None req-42013f34-c1d2-45cb-b492-4034d9971d86 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 880.916265] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-42013f34-c1d2-45cb-b492-4034d9971d86 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 880.916735] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-20946f07-3c02-4fb3-b314-03515341814d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.923783] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e8db9192ae8c486487572fd303d414fc [ 880.927357] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbb07c77-01f6-4bbf-883d-d926bd37874e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.938027] env[62740]: DEBUG nova.compute.manager [None req-5796bde4-492e-4f25-ac8c-fcab9c3edbaf tempest-ServerDiagnosticsV248Test-1188391784 tempest-ServerDiagnosticsV248Test-1188391784-project-member] [instance: e5b0daa1-6745-48ad-8e69-6c7362bac085] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 880.939599] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5796bde4-492e-4f25-ac8c-fcab9c3edbaf tempest-ServerDiagnosticsV248Test-1188391784 tempest-ServerDiagnosticsV248Test-1188391784-project-member] Expecting reply to msg b8b368c2a8fe4471bc2a86c005552aeb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 880.958026] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-42013f34-c1d2-45cb-b492-4034d9971d86 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3102cc87-df1a-4de8-bfdb-9b904f40ea2e could not be found. 
[ 880.958026] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-42013f34-c1d2-45cb-b492-4034d9971d86 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 880.958160] env[62740]: INFO nova.compute.manager [None req-42013f34-c1d2-45cb-b492-4034d9971d86 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 880.958411] env[62740]: DEBUG oslo.service.loopingcall [None req-42013f34-c1d2-45cb-b492-4034d9971d86 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 880.958588] env[62740]: DEBUG nova.compute.manager [-] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 880.959054] env[62740]: DEBUG nova.network.neutron [-] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 880.967705] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b8b368c2a8fe4471bc2a86c005552aeb [ 880.968467] env[62740]: DEBUG nova.compute.manager [None req-5796bde4-492e-4f25-ac8c-fcab9c3edbaf tempest-ServerDiagnosticsV248Test-1188391784 tempest-ServerDiagnosticsV248Test-1188391784-project-member] [instance: e5b0daa1-6745-48ad-8e69-6c7362bac085] Instance disappeared before build. 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 880.968938] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5796bde4-492e-4f25-ac8c-fcab9c3edbaf tempest-ServerDiagnosticsV248Test-1188391784 tempest-ServerDiagnosticsV248Test-1188391784-project-member] Expecting reply to msg 6346c63d0e6946bc801b3adad8544299 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 880.980630] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6346c63d0e6946bc801b3adad8544299 [ 880.985164] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 30c003e283f449b48cc420bc882b58d6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 880.993756] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5796bde4-492e-4f25-ac8c-fcab9c3edbaf tempest-ServerDiagnosticsV248Test-1188391784 tempest-ServerDiagnosticsV248Test-1188391784-project-member] Lock "e5b0daa1-6745-48ad-8e69-6c7362bac085" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.451s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.994213] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-d4d90b3a-0418-4a35-a0e0-36e4fae38c8a tempest-ServersTestJSON-1664078512 tempest-ServersTestJSON-1664078512-project-member] Expecting reply to msg 39f37d998fc54bb8aa8e495ef9892862 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 880.995836] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30c003e283f449b48cc420bc882b58d6 [ 880.996255] env[62740]: DEBUG nova.network.neutron [-] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.996623] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ca6c2bd55242473780eba6e360f7e1bb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 881.006151] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 39f37d998fc54bb8aa8e495ef9892862 [ 881.006551] env[62740]: DEBUG nova.compute.manager [None req-d4d90b3a-0418-4a35-a0e0-36e4fae38c8a tempest-ServersTestJSON-1664078512 tempest-ServersTestJSON-1664078512-project-member] [instance: e8bb18d5-207c-48fb-b7e5-06b72ce61b4f] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 881.008439] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-d4d90b3a-0418-4a35-a0e0-36e4fae38c8a tempest-ServersTestJSON-1664078512 tempest-ServersTestJSON-1664078512-project-member] Expecting reply to msg ad19ed1d278e4d028d1a484293f93ca0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 881.010700] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ca6c2bd55242473780eba6e360f7e1bb [ 881.011168] env[62740]: INFO nova.compute.manager [-] [instance: 3102cc87-df1a-4de8-bfdb-9b904f40ea2e] Took 0.05 seconds to deallocate network for instance. 
[ 881.014824] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-42013f34-c1d2-45cb-b492-4034d9971d86 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Expecting reply to msg 826d3df8ffd04b888936943cd6ed2eda in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 881.043510] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 826d3df8ffd04b888936943cd6ed2eda [ 881.046685] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ad19ed1d278e4d028d1a484293f93ca0 [ 881.047414] env[62740]: DEBUG nova.compute.manager [None req-d4d90b3a-0418-4a35-a0e0-36e4fae38c8a tempest-ServersTestJSON-1664078512 tempest-ServersTestJSON-1664078512-project-member] [instance: e8bb18d5-207c-48fb-b7e5-06b72ce61b4f] Instance disappeared before build. {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 881.047569] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-d4d90b3a-0418-4a35-a0e0-36e4fae38c8a tempest-ServersTestJSON-1664078512 tempest-ServersTestJSON-1664078512-project-member] Expecting reply to msg f9e20aba31fd4b0985c21c0e0e4275ae in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 881.056854] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f9e20aba31fd4b0985c21c0e0e4275ae [ 881.058876] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-42013f34-c1d2-45cb-b492-4034d9971d86 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Expecting reply to msg 7257354351c3488fb1a4dccda90e3187 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 881.074462] env[62740]: DEBUG oslo_concurrency.lockutils [None req-d4d90b3a-0418-4a35-a0e0-36e4fae38c8a tempest-ServersTestJSON-1664078512 tempest-ServersTestJSON-1664078512-project-member] Lock "e8bb18d5-207c-48fb-b7e5-06b72ce61b4f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 210.108s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.075169] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Expecting reply to msg 9409380516594634b65424e7bce25159 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 881.084421] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9409380516594634b65424e7bce25159 [ 881.085077] env[62740]: DEBUG nova.compute.manager [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Starting instance... 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 881.086886] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Expecting reply to msg 2bc4b7f73f854886b395f07e4679e9fc in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 881.122217] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7257354351c3488fb1a4dccda90e3187 [ 881.129721] env[62740]: DEBUG oslo_concurrency.lockutils [None req-42013f34-c1d2-45cb-b492-4034d9971d86 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Lock "3102cc87-df1a-4de8-bfdb-9b904f40ea2e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.218s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.130677] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-42013f34-c1d2-45cb-b492-4034d9971d86 tempest-ImagesNegativeTestJSON-581676109 tempest-ImagesNegativeTestJSON-581676109-project-member] Expecting reply to msg 543352cf14624ae2ad0c0c5be26717d2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 881.132028] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2bc4b7f73f854886b395f07e4679e9fc [ 881.141426] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 543352cf14624ae2ad0c0c5be26717d2 [ 881.149819] env[62740]: DEBUG oslo_concurrency.lockutils [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.150144] env[62740]: DEBUG oslo_concurrency.lockutils [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.151625] env[62740]: INFO nova.compute.claims [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 881.153237] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Expecting reply to msg 483962d9bf5d4e35a621ee036c76fd50 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 881.188061] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 483962d9bf5d4e35a621ee036c76fd50 [ 881.189857] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Expecting reply to msg 90863c7dd62d4bda9ef10ee23662d818 in queue 
reply_30cb6e3d754a4ebf9cedab7950709402 [ 881.199801] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 90863c7dd62d4bda9ef10ee23662d818 [ 881.515541] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79ec23a1-26cd-4797-94a6-9a699edfb7e0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.523464] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95fa8663-3c17-4dde-94b2-35772e69cc69 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.552763] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ca434f3-c870-48bc-8030-284ef74611c6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.560875] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3575ff60-0e69-4108-8983-7973407af3a0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.574749] env[62740]: DEBUG nova.compute.provider_tree [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 881.575280] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Expecting reply to msg 5d662ee864ac4b6183c29236dbbf4f55 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 881.584030] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5d662ee864ac4b6183c29236dbbf4f55 [ 881.584400] env[62740]: DEBUG nova.scheduler.client.report [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 881.586811] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Expecting reply to msg d8d607239bb145ddbb2a6f5bb36e9662 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 881.600124] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d8d607239bb145ddbb2a6f5bb36e9662 [ 881.600918] env[62740]: DEBUG oslo_concurrency.lockutils [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 
tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.451s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.601405] env[62740]: DEBUG nova.compute.manager [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Start building networks asynchronously for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 881.604918] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Expecting reply to msg 729f6d0999be4f5eb8a0e03a3c8db537 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 881.641030] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 729f6d0999be4f5eb8a0e03a3c8db537 [ 881.641718] env[62740]: DEBUG nova.compute.utils [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 881.642335] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Expecting reply to msg 46a114d8fd6b4b6dba9355586e184a9e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 881.643203] env[62740]: DEBUG nova.compute.manager [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 881.643351] env[62740]: DEBUG nova.network.neutron [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 881.651953] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 46a114d8fd6b4b6dba9355586e184a9e [ 881.652481] env[62740]: DEBUG nova.compute.manager [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 881.654132] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Expecting reply to msg dabb64dcc6ab41248cb75d6aaa2ebd81 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 881.682767] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dabb64dcc6ab41248cb75d6aaa2ebd81 [ 881.685879] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Expecting reply to msg 517a5d567b2b4163a188dbd2e3df5a9a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 881.699500] env[62740]: DEBUG nova.policy [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0c15c5ae832b449b94a7b525e4cd49ae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6ea572e12e6d45148deb1f276f2d389b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 881.719826] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 517a5d567b2b4163a188dbd2e3df5a9a [ 881.721089] env[62740]: DEBUG nova.compute.manager [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Start spawning the instance on the hypervisor. 
{{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 881.747764] env[62740]: DEBUG nova.virt.hardware [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=<?>,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-09-04T08:25:18Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 881.748024] env[62740]: DEBUG nova.virt.hardware [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 881.748192] env[62740]: DEBUG nova.virt.hardware [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 881.748561] env[62740]: DEBUG nova.virt.hardware [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 881.748736] env[62740]: DEBUG nova.virt.hardware [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 881.748894] env[62740]: DEBUG nova.virt.hardware [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 881.749235] env[62740]: DEBUG nova.virt.hardware [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 881.749454] env[62740]: DEBUG nova.virt.hardware [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies
/opt/stack/nova/nova/virt/hardware.py:471}} [ 881.749640] env[62740]: DEBUG nova.virt.hardware [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 881.749812] env[62740]: DEBUG nova.virt.hardware [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 881.749989] env[62740]: DEBUG nova.virt.hardware [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 881.750978] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b0ca164-1d0e-4e87-80da-0627a1463878 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.759938] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-220cbeac-506b-46a5-803d-6ac3ff32ab53 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.124947] env[62740]: DEBUG nova.network.neutron [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Successfully created port: d29d7f00-6d02-4442-acbf-e91062aacfd5 {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 883.029326] env[62740]: DEBUG nova.network.neutron [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Successfully updated port: d29d7f00-6d02-4442-acbf-e91062aacfd5 {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 883.029326] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Expecting reply to msg ab2062ca0fa241f0813bfdf57ef1b845 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 883.038056] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ab2062ca0fa241f0813bfdf57ef1b845 [ 883.038820] env[62740]: DEBUG oslo_concurrency.lockutils [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Acquiring lock "refresh_cache-697e3884-2ef4-423e-af81-e5d1e94f65a2" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.038930] env[62740]: DEBUG oslo_concurrency.lockutils [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Acquired lock 
"refresh_cache-697e3884-2ef4-423e-af81-e5d1e94f65a2" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.039092] env[62740]: DEBUG nova.network.neutron [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 883.039484] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Expecting reply to msg 79f78f58d83c4ae5b6f40373dc803a3b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 883.050233] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79f78f58d83c4ae5b6f40373dc803a3b [ 883.052365] env[62740]: DEBUG nova.compute.manager [req-1f7fd5c6-3d2a-4373-abf0-206b624bd907 req-96d9b00d-433e-4c38-9873-43dcd2daebb6 service nova] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Received event network-vif-plugged-d29d7f00-6d02-4442-acbf-e91062aacfd5 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 883.052569] env[62740]: DEBUG oslo_concurrency.lockutils [req-1f7fd5c6-3d2a-4373-abf0-206b624bd907 req-96d9b00d-433e-4c38-9873-43dcd2daebb6 service nova] Acquiring lock "697e3884-2ef4-423e-af81-e5d1e94f65a2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.052767] env[62740]: DEBUG oslo_concurrency.lockutils [req-1f7fd5c6-3d2a-4373-abf0-206b624bd907 req-96d9b00d-433e-4c38-9873-43dcd2daebb6 service nova] Lock "697e3884-2ef4-423e-af81-e5d1e94f65a2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.052929] env[62740]: DEBUG oslo_concurrency.lockutils [req-1f7fd5c6-3d2a-4373-abf0-206b624bd907 req-96d9b00d-433e-4c38-9873-43dcd2daebb6 service nova] Lock "697e3884-2ef4-423e-af81-e5d1e94f65a2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.053103] env[62740]: DEBUG nova.compute.manager [req-1f7fd5c6-3d2a-4373-abf0-206b624bd907 req-96d9b00d-433e-4c38-9873-43dcd2daebb6 service nova] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] No waiting events found dispatching network-vif-plugged-d29d7f00-6d02-4442-acbf-e91062aacfd5 {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 883.053261] env[62740]: WARNING nova.compute.manager [req-1f7fd5c6-3d2a-4373-abf0-206b624bd907 req-96d9b00d-433e-4c38-9873-43dcd2daebb6 service nova] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Received unexpected event network-vif-plugged-d29d7f00-6d02-4442-acbf-e91062aacfd5 for instance with vm_state building and task_state spawning. 
[ 883.109295] env[62740]: DEBUG nova.network.neutron [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 883.353895] env[62740]: DEBUG nova.network.neutron [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Updating instance_info_cache with network_info: [{"id": "d29d7f00-6d02-4442-acbf-e91062aacfd5", "address": "fa:16:3e:a8:c5:72", "network": {"id": "2ed49793-9e8a-4fc1-b20a-f5d083c11a45", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-88986804-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6ea572e12e6d45148deb1f276f2d389b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd29d7f00-6d", "ovs_interfaceid": "d29d7f00-6d02-4442-acbf-e91062aacfd5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.354583] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Expecting reply to msg f6a6a486465a42c1895404df40c4bc12 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 883.368960] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f6a6a486465a42c1895404df40c4bc12 [ 883.369712] env[62740]: DEBUG oslo_concurrency.lockutils [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Releasing lock "refresh_cache-697e3884-2ef4-423e-af81-e5d1e94f65a2" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 883.369996] env[62740]: DEBUG nova.compute.manager [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Instance network_info: |[{"id": "d29d7f00-6d02-4442-acbf-e91062aacfd5", "address": "fa:16:3e:a8:c5:72", "network": {"id": "2ed49793-9e8a-4fc1-b20a-f5d083c11a45", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-88986804-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], 
"routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6ea572e12e6d45148deb1f276f2d389b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd29d7f00-6d", "ovs_interfaceid": "d29d7f00-6d02-4442-acbf-e91062aacfd5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 883.370403] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a8:c5:72', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd29d7f00-6d02-4442-acbf-e91062aacfd5', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 883.377811] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Creating folder: Project (6ea572e12e6d45148deb1f276f2d389b). Parent ref: group-v156037. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 883.378431] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0ea8c555-5e2c-4eed-923c-7474a6aabe69 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.391993] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Created folder: Project (6ea572e12e6d45148deb1f276f2d389b) in parent group-v156037. [ 883.392440] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Creating folder: Instances. Parent ref: group-v156097. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 883.392991] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3d0c2104-6f7a-4d36-a183-5b385774ddc6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.407388] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Created folder: Instances in parent group-v156097. [ 883.407630] env[62740]: DEBUG oslo.service.loopingcall [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 883.407812] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 883.408022] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-75554e1e-3741-4419-9a47-41e3ae2fc2cc {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.428911] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 883.428911] env[62740]: value = "task-640133" [ 883.428911] env[62740]: _type = "Task" [ 883.428911] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.436603] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640133, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.890882] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager.update_available_resource {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 883.891313] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 5338f1ef77f54ce28dd0efa4211178e8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 883.901990] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5338f1ef77f54ce28dd0efa4211178e8 [ 883.902413] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.902632] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.902832] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.902957] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62740) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 883.904056] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d661690-9b70-4cb8-af56-69424684ebbd {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.913781] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-1f963b3f-1860-43eb-89aa-748da145da50 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.927112] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-005e5a8a-2d8d-44c3-8023-3d2e49d5c8e5 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.939234] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a0685ba-46d7-44d8-9fb1-7ad363f1df0d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.942167] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640133, 'name': CreateVM_Task, 'duration_secs': 0.301536} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.942339] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 883.943670] env[62740]: DEBUG oslo_concurrency.lockutils [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.943790] env[62740]: DEBUG oslo_concurrency.lockutils [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Acquired lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.944105] env[62740]: DEBUG oslo_concurrency.lockutils [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 883.945017] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8aae758a-cb78-4575-a642-b87b9a45e5a3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.972866] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181647MB free_disk=90GB free_vcpus=48 pci_devices=None {{(pid=62740) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 883.972866] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.973077] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock 
"compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.973864] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 56ab6edbb84f4d77a91ab6ffc65976b5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 883.977968] env[62740]: DEBUG oslo_vmware.api [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Waiting for the task: (returnval){ [ 883.977968] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52e34c4d-fb70-b424-4299-5b98f519645a" [ 883.977968] env[62740]: _type = "Task" [ 883.977968] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.989058] env[62740]: DEBUG oslo_vmware.api [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52e34c4d-fb70-b424-4299-5b98f519645a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.018691] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 56ab6edbb84f4d77a91ab6ffc65976b5 [ 884.022936] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg b26213a6479a4a8ab3f77265e4696a5d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 884.041069] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b26213a6479a4a8ab3f77265e4696a5d [ 884.068517] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 4f0d1356-bdfb-4cb2-979a-e28f9025b311 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 884.068517] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 75050b95-60c6-4e44-a1d5-0d47492dd739 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 884.068517] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 8053d2ae-ca61-4282-aa89-83f3a2e107bc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 884.068883] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance a24df1e4-2865-4ab3-beae-0892dca12bef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 884.068883] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance fa5248d1-bddf-4244-a363-2113b0473980 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 884.068883] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 6ec38a6c-f4b2-42ce-b371-5fe82d577545 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 884.068996] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 26712c18-d9f4-4d7d-80fb-4d527da9c1e3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 884.069051] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 68aa9321-22ce-45a0-8323-fa8564dca46b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 884.070621] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 00085df9-ce61-4ccc-8ecf-16956109eb8f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 884.070621] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 697e3884-2ef4-423e-af81-e5d1e94f65a2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 884.070621] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 31be1dbee6c84cd781f2cab6d544fae8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 884.081112] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31be1dbee6c84cd781f2cab6d544fae8 [ 884.081917] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance f22357ec-450c-4545-8822-74b83bfc5a35 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 884.082448] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg caeea9f289714462968c75818c045d88 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 884.093671] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg caeea9f289714462968c75818c045d88 [ 884.094956] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 732da1c8-e83e-4dd7-96c2-dbfa9468baab has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 884.094956] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 3297ba6102224e8688ff1364fed7c04d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 884.105069] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3297ba6102224e8688ff1364fed7c04d [ 884.105843] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 388a39df-9fa9-4153-9f3c-4ad94fd5edfb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 884.107034] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 185daaea9ad64625b32f1dfa00304be3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 884.116650] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 185daaea9ad64625b32f1dfa00304be3 [ 884.117983] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 5f57389d-853e-4439-872a-8345664578d0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 884.117983] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 65371829e9ad46dfabb181d9d77e3ff8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 884.129134] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 65371829e9ad46dfabb181d9d77e3ff8 [ 884.129929] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 472cd209-4192-4473-b788-d1ea342653bf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 884.130481] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 6dfda94a280c4d62bb95899bc7796edb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 884.142017] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6dfda94a280c4d62bb95899bc7796edb [ 884.142017] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance d8dac9af-0897-4fbf-8ee6-1fb3955d48c0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 884.142240] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 68fd60af241d4bd9a27edb7ecf8abad5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 884.152201] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 68fd60af241d4bd9a27edb7ecf8abad5 [ 884.152909] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 2162ea32-6407-4286-9340-b62a9ec0988e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 884.153413] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 1b750218a7e04a2c8e26dc4fa5c62b02 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 884.163387] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1b750218a7e04a2c8e26dc4fa5c62b02 [ 884.164136] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance d2669ffb-41b1-474d-bb7a-fabae11e69d0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 884.164644] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg e98c714a1f514a1fbd94b5343179d5ae in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 884.178529] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e98c714a1f514a1fbd94b5343179d5ae [ 884.179294] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance f7574228-f7fc-4ab0-9a38-7671046d46a9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 884.179795] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 7563cbb7d3a84053835bcd229d3a549d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 884.190279] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7563cbb7d3a84053835bcd229d3a549d [ 884.190996] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance b1b86050-2bb1-443b-967b-12531d71ba04 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 884.191979] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 93ae746b84474580b066110b09b170ca in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 884.204068] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 93ae746b84474580b066110b09b170ca [ 884.204775] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 285dcc26-a4e9-40bc-82dd-37931f46e7fe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 884.205277] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg adef4d143e22481fa0bb597a842a4e26 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 884.215144] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg adef4d143e22481fa0bb597a842a4e26 [ 884.216338] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance b0995d6c-a700-47a3-a39d-6a6fe1462042 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 884.216448] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg d2ee23a05f78486abd8190e519b2323a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 884.230110] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d2ee23a05f78486abd8190e519b2323a [ 884.230970] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance d6c3ca16-5c7c-41e6-9850-10221603ad2a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 884.231496] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 0edec48d1628415aa72086c00704e62d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 884.244865] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0edec48d1628415aa72086c00704e62d [ 884.245668] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance d60c8e65-1eb3-4017-b28e-8b72b0b4b2e1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 884.246239] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg fa7e35b3adec4c37b54a25ca7d4b5f0f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 884.257245] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fa7e35b3adec4c37b54a25ca7d4b5f0f [ 884.257934] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 6531eee8-d8ec-4a9d-911c-d7d9b88baf19 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 884.258212] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 884.258398] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 884.491447] env[62740]: DEBUG oslo_concurrency.lockutils [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Releasing lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.493087] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 884.493087] env[62740]: DEBUG oslo_concurrency.lockutils [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Acquiring 
lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 884.598952] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-582f5f97-2355-4c4a-a137-29bfef4280be {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.607168] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6358d775-95d1-4980-92c7-09fe0f6955f8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.639089] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-626501aa-023a-490c-ae91-276c9e375f26 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.648016] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40c05547-f21e-4c66-bd1b-bc555fce7186 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.662315] env[62740]: DEBUG nova.compute.provider_tree [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 884.662782] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 255c3923216c453a8b92c2cb4adc3780 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 884.671842] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 255c3923216c453a8b92c2cb4adc3780 [ 884.672767] env[62740]: DEBUG nova.scheduler.client.report [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 884.676235] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 8977c577af8c45718911912c595c7a3f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 884.688649] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8977c577af8c45718911912c595c7a3f [ 884.689521] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62740) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 884.689897] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.717s 
{{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.254647] env[62740]: DEBUG nova.compute.manager [req-de6bd8f6-a632-43aa-b34c-e1f32a14c843 req-e2e94b26-cf3b-41eb-a9ed-d4b1188a8e8e service nova] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Received event network-changed-d29d7f00-6d02-4442-acbf-e91062aacfd5 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 885.254784] env[62740]: DEBUG nova.compute.manager [req-de6bd8f6-a632-43aa-b34c-e1f32a14c843 req-e2e94b26-cf3b-41eb-a9ed-d4b1188a8e8e service nova] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Refreshing instance network info cache due to event network-changed-d29d7f00-6d02-4442-acbf-e91062aacfd5. {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 885.254932] env[62740]: DEBUG oslo_concurrency.lockutils [req-de6bd8f6-a632-43aa-b34c-e1f32a14c843 req-e2e94b26-cf3b-41eb-a9ed-d4b1188a8e8e service nova] Acquiring lock "refresh_cache-697e3884-2ef4-423e-af81-e5d1e94f65a2" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 885.255088] env[62740]: DEBUG oslo_concurrency.lockutils [req-de6bd8f6-a632-43aa-b34c-e1f32a14c843 req-e2e94b26-cf3b-41eb-a9ed-d4b1188a8e8e service nova] Acquired lock "refresh_cache-697e3884-2ef4-423e-af81-e5d1e94f65a2" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.255251] env[62740]: DEBUG nova.network.neutron [req-de6bd8f6-a632-43aa-b34c-e1f32a14c843 req-e2e94b26-cf3b-41eb-a9ed-d4b1188a8e8e service nova] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Refreshing network info cache for port d29d7f00-6d02-4442-acbf-e91062aacfd5 {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 885.255723] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-de6bd8f6-a632-43aa-b34c-e1f32a14c843 req-e2e94b26-cf3b-41eb-a9ed-d4b1188a8e8e service nova] Expecting reply to msg db5b83cf686a4397a7fb59ee903c4dd1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 885.265662] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg db5b83cf686a4397a7fb59ee903c4dd1 [ 885.628360] env[62740]: DEBUG nova.network.neutron [req-de6bd8f6-a632-43aa-b34c-e1f32a14c843 req-e2e94b26-cf3b-41eb-a9ed-d4b1188a8e8e service nova] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Updated VIF entry in instance network info cache for port d29d7f00-6d02-4442-acbf-e91062aacfd5. 
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 885.628902] env[62740]: DEBUG nova.network.neutron [req-de6bd8f6-a632-43aa-b34c-e1f32a14c843 req-e2e94b26-cf3b-41eb-a9ed-d4b1188a8e8e service nova] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Updating instance_info_cache with network_info: [{"id": "d29d7f00-6d02-4442-acbf-e91062aacfd5", "address": "fa:16:3e:a8:c5:72", "network": {"id": "2ed49793-9e8a-4fc1-b20a-f5d083c11a45", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-88986804-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6ea572e12e6d45148deb1f276f2d389b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd29d7f00-6d", "ovs_interfaceid": "d29d7f00-6d02-4442-acbf-e91062aacfd5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.629274] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-de6bd8f6-a632-43aa-b34c-e1f32a14c843 req-e2e94b26-cf3b-41eb-a9ed-d4b1188a8e8e service nova] Expecting reply to msg 4272d60a7d8e4cdba67695d9724a9275 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 885.639920] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4272d60a7d8e4cdba67695d9724a9275 [ 885.640581] env[62740]: DEBUG oslo_concurrency.lockutils [req-de6bd8f6-a632-43aa-b34c-e1f32a14c843 req-e2e94b26-cf3b-41eb-a9ed-d4b1188a8e8e service nova] Releasing lock "refresh_cache-697e3884-2ef4-423e-af81-e5d1e94f65a2" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 885.689985] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 885.886398] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 885.890092] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 885.890304] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Starting heal instance info cache {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 885.890503] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] 
Rebuilding the list of instances to heal {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 885.891167] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 53b7cb249e26428e95ca5409b84a47d9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 885.916955] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 53b7cb249e26428e95ca5409b84a47d9 [ 885.919464] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 885.919652] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 885.919768] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 885.919904] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 885.920036] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 885.920160] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 885.920281] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 885.920401] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 885.920516] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 885.920748] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Skipping network cache update for instance because it is Building. 
{{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 885.921030] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Didn't find any instances for network info cache update. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 885.921416] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 885.921616] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 885.921750] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62740) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 886.890940] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 886.891305] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 887.891131] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 888.886573] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 888.887231] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 9e74726ac15e4436a5fe4aea0a33cd69 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 888.909488] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9e74726ac15e4436a5fe4aea0a33cd69 [ 890.175727] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 91845978d48449d88c695ee9a338bf15 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 890.185662] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 91845978d48449d88c695ee9a338bf15 [ 891.578159] env[62740]: WARNING oslo_vmware.rw_handles [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 891.578159] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 891.578159] env[62740]: ERROR oslo_vmware.rw_handles File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 891.578159] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 891.578159] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 891.578159] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 891.578159] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 891.578159] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 891.578159] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 891.578159] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 891.578159] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 891.578159] env[62740]: ERROR oslo_vmware.rw_handles [ 891.578816] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/1419c9c4-88f2-4afd-bc10-33fdac69a158/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore1 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 891.580572] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 891.580837] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Copying Virtual Disk [datastore1] vmware_temp/1419c9c4-88f2-4afd-bc10-33fdac69a158/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore1] vmware_temp/1419c9c4-88f2-4afd-bc10-33fdac69a158/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 891.581128] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-10ec4d4c-680d-4126-84b7-8b048cdc6321 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.589350] env[62740]: DEBUG oslo_vmware.api [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Waiting for the task: (returnval){ [ 891.589350] env[62740]: value = "task-640137" [ 891.589350] env[62740]: _type = "Task" [ 891.589350] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.597045] env[62740]: DEBUG oslo_vmware.api [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Task: {'id': task-640137, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.100441] env[62740]: DEBUG oslo_vmware.exceptions [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Fault InvalidArgument not matched. {{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 892.100752] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Releasing lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 892.101511] env[62740]: ERROR nova.compute.manager [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 892.101511] env[62740]: Faults: ['InvalidArgument'] [ 892.101511] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] Traceback (most recent call last): [ 892.101511] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 892.101511] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] yield resources [ 892.101511] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 892.101511] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] self.driver.spawn(context, instance, image_meta, [ 892.101511] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 892.101511] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] self._vmops.spawn(context, instance, image_meta, injected_files, [ 892.101511] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 892.101511] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] self._fetch_image_if_missing(context, vi) [ 892.101511] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 892.101952] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] image_cache(vi, tmp_image_ds_loc) [ 892.101952] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 892.101952] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] vm_util.copy_virtual_disk( [ 892.101952] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 892.101952] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] session._wait_for_task(vmdk_copy_task) [ 892.101952] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 892.101952] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] return self.wait_for_task(task_ref) [ 892.101952] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 892.101952] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] return evt.wait() [ 892.101952] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 892.101952] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] result = hub.switch() [ 892.101952] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 892.101952] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] return self.greenlet.switch() [ 892.102344] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 892.102344] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] self.f(*self.args, **self.kw) [ 892.102344] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 892.102344] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] raise exceptions.translate_fault(task_info.error) [ 892.102344] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 892.102344] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] Faults: ['InvalidArgument'] [ 892.102344] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] [ 892.102344] env[62740]: INFO nova.compute.manager [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Terminating instance [ 892.103550] env[62740]: DEBUG oslo_concurrency.lockutils [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Acquired lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.103777] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 
tempest-ServersAdminTestJSON-1870870725-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 892.104039] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7b70310b-62ff-4fe7-ae43-5330fec5d958 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.106384] env[62740]: DEBUG nova.compute.manager [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 892.106580] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 892.107315] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-219aa3b8-6a69-486e-9ee6-afcc32e9eb37 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.114136] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 892.114347] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f924e754-a9c4-4179-b6e2-91607b87ecdb {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.116540] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 892.116707] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 892.117668] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e223629-232a-49ad-bc6e-eb44d28987ea {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.122229] env[62740]: DEBUG oslo_vmware.api [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Waiting for the task: (returnval){ [ 892.122229] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52af7b53-ff03-87a2-bdd8-38cc8d82f380" [ 892.122229] env[62740]: _type = "Task" [ 892.122229] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.129346] env[62740]: DEBUG oslo_vmware.api [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52af7b53-ff03-87a2-bdd8-38cc8d82f380, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.183766] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 892.184240] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Deleting contents of the VM from datastore datastore1 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 892.184440] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Deleting the datastore file [datastore1] fa5248d1-bddf-4244-a363-2113b0473980 {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 892.184732] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4475e91d-85da-4f68-8801-47192c405ec6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.190989] env[62740]: DEBUG oslo_vmware.api [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Waiting for the task: (returnval){ [ 892.190989] env[62740]: value = "task-640139" [ 892.190989] env[62740]: _type = "Task" [ 892.190989] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.632279] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 892.632567] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Creating directory with path [datastore1] vmware_temp/a6119ab2-1bde-4c55-baaa-b7c9448a7198/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 892.632873] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-88bbfc33-cb85-4758-89fe-3e9af497a14f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.644147] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Created directory with path [datastore1] vmware_temp/a6119ab2-1bde-4c55-baaa-b7c9448a7198/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 892.644342] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Fetch image to [datastore1] vmware_temp/a6119ab2-1bde-4c55-baaa-b7c9448a7198/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 892.644519] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore1] vmware_temp/a6119ab2-1bde-4c55-baaa-b7c9448a7198/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore1 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 892.645334] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1b7d0bb-016b-4e0a-87ef-f78d2d1f669d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.651816] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82bb7d40-df45-44c1-b288-859ed3dea7c7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.660806] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db476c5c-d80f-4b2a-8bb5-2b3f452992e7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.695375] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11154907-d725-4a0f-8aee-ff81069d8382 {{(pid=62740) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.702378] env[62740]: DEBUG oslo_vmware.api [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Task: {'id': task-640139, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.088513} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.703828] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 892.704034] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Deleted contents of the VM from datastore datastore1 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 892.704218] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 892.704419] env[62740]: INFO nova.compute.manager [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Took 0.60 seconds to destroy the instance on the hypervisor. 
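
The wait_for_task/_poll_task entries above (oslo_vmware/api.py:397, :434 and :444) show oslo.vmware polling a vCenter task object until it reaches a terminal state. A minimal sketch of that polling shape in plain Python; get_task_info() is a hypothetical stand-in for the PropertyCollector read that oslo.vmware actually performs, and TaskFault stands in for its translated fault exceptions:

    import time

    class TaskFault(Exception):
        """Stand-in for a translated vCenter task fault."""

    def wait_for_task(get_task_info, poll_interval=0.5):
        # Poll until the task leaves the queued/running states.
        while True:
            info = get_task_info()  # e.g. {'state': 'running', 'progress': 40}
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                # Mirrors _poll_task raising exceptions.translate_fault(...)
                raise TaskFault(info.get('error', 'unknown fault'))
            time.sleep(poll_interval)

    states = iter([{'state': 'running', 'progress': 0},
                   {'state': 'success', 'result': 'task-640137'}])
    print(wait_for_task(lambda: next(states)))  # -> task-640137

The earlier CopyVirtualDisk_Task failure surfaces through the error branch: the task fault is translated and raised, which is why the spawn traceback above ends in exceptions.translate_fault(task_info.error) and a VimFaultException.
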
[ 892.706252] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-98680514-6eb4-4a54-bc0e-5fdfca3f86e1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.708483] env[62740]: DEBUG nova.compute.claims [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 892.708654] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 892.708868] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 892.710888] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg f7a5130824ca4628b7994669fb8aed89 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 892.730480] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore1 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 892.751642] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f7a5130824ca4628b7994669fb8aed89 [ 892.791648] env[62740]: DEBUG oslo_vmware.rw_handles [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a6119ab2-1bde-4c55-baaa-b7c9448a7198/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 892.857182] env[62740]: DEBUG oslo_vmware.rw_handles [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Completed reading data from the image iterator. 
{{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 892.857322] env[62740]: DEBUG oslo_vmware.rw_handles [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a6119ab2-1bde-4c55-baaa-b7c9448a7198/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 893.155312] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d49372aa-2ec0-401f-9a27-f7ef33e3c05f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.163216] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad0f137-c641-44df-aaf7-0bda1a5dd533 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.195621] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6020d0bd-a48b-40ee-b2b8-ea45852209af {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.205728] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94843918-eb0c-43b3-99cb-849d3ac0754e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.219536] env[62740]: DEBUG nova.compute.provider_tree [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 893.220071] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg e0d6f779f37d4c30a5dff50d6fb01613 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 893.234061] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e0d6f779f37d4c30a5dff50d6fb01613 [ 893.234061] env[62740]: DEBUG nova.scheduler.client.report [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 893.234354] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg 
0ce63aea9ebe4b7eaa8386b3f60368c0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 893.255406] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0ce63aea9ebe4b7eaa8386b3f60368c0 [ 893.256383] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.547s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.256929] env[62740]: ERROR nova.compute.manager [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 893.256929] env[62740]: Faults: ['InvalidArgument'] [ 893.256929] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] Traceback (most recent call last): [ 893.256929] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 893.256929] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] self.driver.spawn(context, instance, image_meta, [ 893.256929] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 893.256929] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] self._vmops.spawn(context, instance, image_meta, injected_files, [ 893.256929] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 893.256929] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] self._fetch_image_if_missing(context, vi) [ 893.256929] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 893.256929] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] image_cache(vi, tmp_image_ds_loc) [ 893.256929] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 893.257531] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] vm_util.copy_virtual_disk( [ 893.257531] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 893.257531] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] session._wait_for_task(vmdk_copy_task) [ 893.257531] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 893.257531] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] return self.wait_for_task(task_ref) [ 893.257531] env[62740]: ERROR 
nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 893.257531] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] return evt.wait() [ 893.257531] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 893.257531] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] result = hub.switch() [ 893.257531] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 893.257531] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] return self.greenlet.switch() [ 893.257531] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 893.257531] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] self.f(*self.args, **self.kw) [ 893.258144] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 893.258144] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] raise exceptions.translate_fault(task_info.error) [ 893.258144] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 893.258144] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] Faults: ['InvalidArgument'] [ 893.258144] env[62740]: ERROR nova.compute.manager [instance: fa5248d1-bddf-4244-a363-2113b0473980] [ 893.258144] env[62740]: DEBUG nova.compute.utils [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 893.259319] env[62740]: DEBUG nova.compute.manager [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Build of instance fa5248d1-bddf-4244-a363-2113b0473980 was re-scheduled: A specified parameter was not correct: fileType [ 893.259319] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 893.259758] env[62740]: DEBUG nova.compute.manager [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 893.259944] env[62740]: DEBUG nova.compute.manager [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Virt driver does not provide 
unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 893.260117] env[62740]: DEBUG nova.compute.manager [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 893.260287] env[62740]: DEBUG nova.network.neutron [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 893.715199] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg 65013dbe80664ce9871a518f3ba2f236 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 893.745201] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 65013dbe80664ce9871a518f3ba2f236 [ 893.745785] env[62740]: DEBUG nova.network.neutron [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.746299] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg 48c8160f4e2a45fa88b1e16c0dc1ddfe in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 893.761232] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 48c8160f4e2a45fa88b1e16c0dc1ddfe [ 893.761858] env[62740]: INFO nova.compute.manager [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Took 0.50 seconds to deallocate network for instance. 
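
The "Virt driver does not provide unplug_vifs method" entry above reflects a capability probe on the virt driver during cleanup: VIFs are only unplugged when the driver implements the hook. A rough sketch of that shape; the class and function names here are illustrative, not Nova's actual code, which differs in detail:

    class NoUnplugDriver:
        """Driver that, like vmwareapi here, does not implement unplug_vifs."""
        def unplug_vifs(self, instance, network_info):
            raise NotImplementedError()

    def cleanup_vifs(driver, instance, network_info):
        try:
            driver.unplug_vifs(instance, network_info)
        except NotImplementedError:
            # Matches the debug entry: skip, since it is not possible to
            # determine whether VIFs should be unplugged.
            print('virt driver does not provide unplug_vifs; skipping')

    cleanup_vifs(NoUnplugDriver(), instance=None, network_info=[])
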
[ 893.765791] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg 45b259ade1604a68b6041d91147a7785 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 893.799044] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 45b259ade1604a68b6041d91147a7785 [ 893.802348] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg d386014521394e1da0ae0ed5e6323016 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 893.839238] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d386014521394e1da0ae0ed5e6323016 [ 893.865033] env[62740]: INFO nova.scheduler.client.report [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Deleted allocations for instance fa5248d1-bddf-4244-a363-2113b0473980 [ 893.870474] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg 305051825feb4295b7e85bce1c3772f5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 893.886024] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 305051825feb4295b7e85bce1c3772f5 [ 893.886462] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2f3f15d7-7aeb-415b-aae2-370b8c6de428 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Lock "fa5248d1-bddf-4244-a363-2113b0473980" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 278.401s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.887030] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Expecting reply to msg 93b108816bcb44cda66ad64c96143a58 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 893.887775] env[62740]: DEBUG oslo_concurrency.lockutils [None req-cf68cba5-44a0-4c3e-b442-a0e0c8f4e916 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Lock "fa5248d1-bddf-4244-a363-2113b0473980" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 80.474s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.887998] env[62740]: DEBUG oslo_concurrency.lockutils [None req-cf68cba5-44a0-4c3e-b442-a0e0c8f4e916 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Acquiring lock "fa5248d1-bddf-4244-a363-2113b0473980-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.888274] env[62740]: DEBUG oslo_concurrency.lockutils [None req-cf68cba5-44a0-4c3e-b442-a0e0c8f4e916 tempest-ListImageFiltersTestJSON-1031002993 
tempest-ListImageFiltersTestJSON-1031002993-project-member] Lock "fa5248d1-bddf-4244-a363-2113b0473980-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.888458] env[62740]: DEBUG oslo_concurrency.lockutils [None req-cf68cba5-44a0-4c3e-b442-a0e0c8f4e916 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Lock "fa5248d1-bddf-4244-a363-2113b0473980-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.891066] env[62740]: INFO nova.compute.manager [None req-cf68cba5-44a0-4c3e-b442-a0e0c8f4e916 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Terminating instance [ 893.893254] env[62740]: DEBUG nova.compute.manager [None req-cf68cba5-44a0-4c3e-b442-a0e0c8f4e916 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 893.893449] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-cf68cba5-44a0-4c3e-b442-a0e0c8f4e916 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 893.893705] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-99704c49-eb48-4ff1-81ed-740fcc98ab5f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.903957] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4018e37d-2789-4ec2-bf8c-dc1adf18adfd {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.916851] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 93b108816bcb44cda66ad64c96143a58 [ 893.917457] env[62740]: DEBUG nova.compute.manager [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 893.919376] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Expecting reply to msg 8f4f529b4d9f408e8d9ec6d508585cff in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 893.938717] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-cf68cba5-44a0-4c3e-b442-a0e0c8f4e916 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fa5248d1-bddf-4244-a363-2113b0473980 could not be found. 
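
The Acquiring lock / acquired / "released" triplets throughout this trace, including the per-instance and "<uuid>-events" locks just above, are emitted by oslo.concurrency's synchronized decorator (the inner wrapper logged at lockutils.py:402, :407 and :421). A minimal usage sketch with the instance UUID from the log as the lock name; the body is a placeholder:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('fa5248d1-bddf-4244-a363-2113b0473980')
    def do_terminate_instance():
        # Only one terminate may run per instance UUID at a time; the
        # decorator logs acquire, wait and hold times like the entries above.
        pass

    do_terminate_instance()
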
[ 893.938938] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-cf68cba5-44a0-4c3e-b442-a0e0c8f4e916 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 893.939137] env[62740]: INFO nova.compute.manager [None req-cf68cba5-44a0-4c3e-b442-a0e0c8f4e916 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Took 0.05 seconds to destroy the instance on the hypervisor. [ 893.939426] env[62740]: DEBUG oslo.service.loopingcall [None req-cf68cba5-44a0-4c3e-b442-a0e0c8f4e916 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 893.939943] env[62740]: DEBUG nova.compute.manager [-] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 893.939943] env[62740]: DEBUG nova.network.neutron [-] [instance: fa5248d1-bddf-4244-a363-2113b0473980] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 893.958912] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 76959ae340324d67afcdfb185e002b1c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 893.965058] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 76959ae340324d67afcdfb185e002b1c [ 893.965445] env[62740]: DEBUG nova.network.neutron [-] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.965891] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 3b55300eeec94df88f23f456a1fc202e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 893.978732] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3b55300eeec94df88f23f456a1fc202e [ 893.979490] env[62740]: INFO nova.compute.manager [-] [instance: fa5248d1-bddf-4244-a363-2113b0473980] Took 0.04 seconds to deallocate network for instance. 
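
The "Waiting for function ... _deallocate_network_with_retries to return" entry comes from oslo.service's loopingcall module, which Nova uses to retry network deallocation. A sketch of the simplest member of that machinery, FixedIntervalLoopingCall (Nova's retry wrapper in this trace adds backoff on top of the same module); assumes oslo.service is installed:

    from oslo_service import loopingcall

    attempts = {'n': 0}

    def _try_deallocate():
        attempts['n'] += 1
        if attempts['n'] < 3:
            return  # not done yet; the timer calls us again next interval
        raise loopingcall.LoopingCallDone(retvalue='deallocated')

    timer = loopingcall.FixedIntervalLoopingCall(_try_deallocate)
    print(timer.start(interval=0.1).wait())  # -> 'deallocated' on the third call
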
[ 893.983262] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-cf68cba5-44a0-4c3e-b442-a0e0c8f4e916 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg 7182693a956a4122b8ed0fe1077c4d9a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 893.986166] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f4f529b4d9f408e8d9ec6d508585cff [ 894.001125] env[62740]: DEBUG oslo_concurrency.lockutils [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.001438] env[62740]: DEBUG oslo_concurrency.lockutils [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.002803] env[62740]: INFO nova.compute.claims [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 894.004371] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Expecting reply to msg 9f3ddcaaaeb94d2287b633dbef92d7a3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 894.009298] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7182693a956a4122b8ed0fe1077c4d9a [ 894.026393] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-cf68cba5-44a0-4c3e-b442-a0e0c8f4e916 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg 87b9e10e8f104a97baa67a3c4ceb0ff3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 894.043780] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f3ddcaaaeb94d2287b633dbef92d7a3 [ 894.045525] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Expecting reply to msg c0c425d4fb9342afb5a8318a21382055 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 894.053624] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c0c425d4fb9342afb5a8318a21382055 [ 894.066021] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 87b9e10e8f104a97baa67a3c4ceb0ff3 [ 894.069900] env[62740]: DEBUG oslo_concurrency.lockutils [None req-cf68cba5-44a0-4c3e-b442-a0e0c8f4e916 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Lock "fa5248d1-bddf-4244-a363-2113b0473980" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.182s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
894.070251] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-cf68cba5-44a0-4c3e-b442-a0e0c8f4e916 tempest-ListImageFiltersTestJSON-1031002993 tempest-ListImageFiltersTestJSON-1031002993-project-member] Expecting reply to msg 58b2fd8372b444cd88df13e2b7ae13a3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 894.082645] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 58b2fd8372b444cd88df13e2b7ae13a3 [ 894.352151] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e2b945f-c5e1-4a28-be80-d39135f44c41 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.359877] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67def161-a92a-49bf-8c74-7df582e54978 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.390319] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1ae7244-1c62-4dc4-9b77-dc255c3946ac {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.397353] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d7cef8b-477e-4009-bc1d-79b6c1f233e8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.410117] env[62740]: DEBUG nova.compute.provider_tree [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 894.410641] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Expecting reply to msg 87fe59f0f6c649aabacb54127f682a27 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 894.419800] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 87fe59f0f6c649aabacb54127f682a27 [ 894.420758] env[62740]: DEBUG nova.scheduler.client.report [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 894.423143] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Expecting reply to msg 5fcced534cbc42d1be79e9821114c84c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 894.433796] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
5fcced534cbc42d1be79e9821114c84c [ 894.434545] env[62740]: DEBUG oslo_concurrency.lockutils [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.433s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.435019] env[62740]: DEBUG nova.compute.manager [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Start building networks asynchronously for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 894.436668] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Expecting reply to msg 8bcbb74eaf784119b0f0e694c663f3cc in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 894.470042] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8bcbb74eaf784119b0f0e694c663f3cc [ 894.470918] env[62740]: DEBUG nova.compute.utils [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 894.472444] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Expecting reply to msg 0b93f5b83cc041f3ba873c87d5bd2fad in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 894.472955] env[62740]: DEBUG nova.compute.manager [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 894.472955] env[62740]: DEBUG nova.network.neutron [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 894.481954] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0b93f5b83cc041f3ba873c87d5bd2fad [ 894.482686] env[62740]: DEBUG nova.compute.manager [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 894.483816] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Expecting reply to msg a7e447381b7f4ccda56b30fa5419d6aa in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 894.514837] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a7e447381b7f4ccda56b30fa5419d6aa [ 894.517995] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Expecting reply to msg e2a091f354d74d55864d8e26bd012bdb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 894.543642] env[62740]: DEBUG nova.policy [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dddd74cb4b7d4711be29f9adb6b49d34', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c685a9b7969f494a9f1ea727bb2ff492', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 894.554249] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2a091f354d74d55864d8e26bd012bdb [ 894.555481] env[62740]: DEBUG nova.compute.manager [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Start spawning the instance on the hypervisor. 
{{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 894.577759] env[62740]: DEBUG nova.virt.hardware [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 894.577999] env[62740]: DEBUG nova.virt.hardware [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 894.578172] env[62740]: DEBUG nova.virt.hardware [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 894.578501] env[62740]: DEBUG nova.virt.hardware [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 894.578809] env[62740]: DEBUG nova.virt.hardware [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 894.579086] env[62740]: DEBUG nova.virt.hardware [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 894.579241] env[62740]: DEBUG nova.virt.hardware [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 894.579400] env[62740]: DEBUG nova.virt.hardware [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 894.579750] env[62740]: DEBUG nova.virt.hardware [None 
req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 894.579750] env[62740]: DEBUG nova.virt.hardware [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 894.579898] env[62740]: DEBUG nova.virt.hardware [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 894.580869] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-474628dc-bff9-4d69-97fc-b8174043da56 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.588705] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b8e3cf9-481d-4c28-912c-d7ddae750d87 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.971462] env[62740]: DEBUG nova.network.neutron [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Successfully created port: 488ed98b-f8df-4275-a42c-c0365dba7070 {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 896.079708] env[62740]: DEBUG nova.compute.manager [req-1ba3355a-a700-4752-8827-c77cde3d8bff req-779713d4-afde-4e4f-8e01-4cf472ef1b04 service nova] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Received event network-vif-plugged-488ed98b-f8df-4275-a42c-c0365dba7070 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 896.080098] env[62740]: DEBUG oslo_concurrency.lockutils [req-1ba3355a-a700-4752-8827-c77cde3d8bff req-779713d4-afde-4e4f-8e01-4cf472ef1b04 service nova] Acquiring lock "f22357ec-450c-4545-8822-74b83bfc5a35-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 896.080137] env[62740]: DEBUG oslo_concurrency.lockutils [req-1ba3355a-a700-4752-8827-c77cde3d8bff req-779713d4-afde-4e4f-8e01-4cf472ef1b04 service nova] Lock "f22357ec-450c-4545-8822-74b83bfc5a35-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.080314] env[62740]: DEBUG oslo_concurrency.lockutils [req-1ba3355a-a700-4752-8827-c77cde3d8bff req-779713d4-afde-4e4f-8e01-4cf472ef1b04 service nova] Lock "f22357ec-450c-4545-8822-74b83bfc5a35-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.080463] env[62740]: DEBUG nova.compute.manager 
[req-1ba3355a-a700-4752-8827-c77cde3d8bff req-779713d4-afde-4e4f-8e01-4cf472ef1b04 service nova] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] No waiting events found dispatching network-vif-plugged-488ed98b-f8df-4275-a42c-c0365dba7070 {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 896.080656] env[62740]: WARNING nova.compute.manager [req-1ba3355a-a700-4752-8827-c77cde3d8bff req-779713d4-afde-4e4f-8e01-4cf472ef1b04 service nova] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Received unexpected event network-vif-plugged-488ed98b-f8df-4275-a42c-c0365dba7070 for instance with vm_state building and task_state spawning. [ 896.121458] env[62740]: DEBUG nova.network.neutron [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Successfully updated port: 488ed98b-f8df-4275-a42c-c0365dba7070 {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 896.121995] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Expecting reply to msg 46b9bb1399fb4203b6a4404e147d9db8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 896.142918] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 46b9bb1399fb4203b6a4404e147d9db8 [ 896.143653] env[62740]: DEBUG oslo_concurrency.lockutils [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Acquiring lock "refresh_cache-f22357ec-450c-4545-8822-74b83bfc5a35" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 896.143722] env[62740]: DEBUG oslo_concurrency.lockutils [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Acquired lock "refresh_cache-f22357ec-450c-4545-8822-74b83bfc5a35" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.143999] env[62740]: DEBUG nova.network.neutron [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 896.144284] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Expecting reply to msg d8babca1b02b4ab2924bd8b0bdd0c4c7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 896.152749] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d8babca1b02b4ab2924bd8b0bdd0c4c7 [ 896.240305] env[62740]: DEBUG nova.network.neutron [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 896.573616] env[62740]: DEBUG nova.network.neutron [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Updating instance_info_cache with network_info: [{"id": "488ed98b-f8df-4275-a42c-c0365dba7070", "address": "fa:16:3e:ce:32:94", "network": {"id": "970cd08f-3b66-450c-b224-58ccf47a76cc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1787034591-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c685a9b7969f494a9f1ea727bb2ff492", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11da2092-76f7-447e-babb-8fc14ad39a71", "external-id": "nsx-vlan-transportzone-585", "segmentation_id": 585, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap488ed98b-f8", "ovs_interfaceid": "488ed98b-f8df-4275-a42c-c0365dba7070", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.574406] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Expecting reply to msg dceaaa04a80d486296be8b84536019c0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 896.589992] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dceaaa04a80d486296be8b84536019c0 [ 896.590885] env[62740]: DEBUG oslo_concurrency.lockutils [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Releasing lock "refresh_cache-f22357ec-450c-4545-8822-74b83bfc5a35" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 896.590954] env[62740]: DEBUG nova.compute.manager [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Instance network_info: |[{"id": "488ed98b-f8df-4275-a42c-c0365dba7070", "address": "fa:16:3e:ce:32:94", "network": {"id": "970cd08f-3b66-450c-b224-58ccf47a76cc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1787034591-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c685a9b7969f494a9f1ea727bb2ff492", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11da2092-76f7-447e-babb-8fc14ad39a71", "external-id": 
"nsx-vlan-transportzone-585", "segmentation_id": 585, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap488ed98b-f8", "ovs_interfaceid": "488ed98b-f8df-4275-a42c-c0365dba7070", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 896.591322] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ce:32:94', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '11da2092-76f7-447e-babb-8fc14ad39a71', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '488ed98b-f8df-4275-a42c-c0365dba7070', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 896.599285] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Creating folder: Project (c685a9b7969f494a9f1ea727bb2ff492). Parent ref: group-v156037. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 896.599954] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-54af8c9e-e730-4bc0-9503-50aa73909f8a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.614511] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Created folder: Project (c685a9b7969f494a9f1ea727bb2ff492) in parent group-v156037. [ 896.614785] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Creating folder: Instances. Parent ref: group-v156103. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 896.615065] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b23d0a02-ab05-4ea2-b41a-00e64b1e0bf0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.625210] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Created folder: Instances in parent group-v156103. [ 896.625485] env[62740]: DEBUG oslo.service.loopingcall [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 896.625708] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 896.625939] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3783cae7-fb99-4c64-aff6-f2c0d6967d02 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 896.646764] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 896.646764] env[62740]: value = "task-640144"
[ 896.646764] env[62740]: _type = "Task"
[ 896.646764] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 896.655144] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640144, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 897.161355] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640144, 'name': CreateVM_Task, 'duration_secs': 0.313335} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 897.161602] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 897.162301] env[62740]: DEBUG oslo_concurrency.lockutils [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 897.162499] env[62740]: DEBUG oslo_concurrency.lockutils [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 897.162843] env[62740]: DEBUG oslo_concurrency.lockutils [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 897.163128] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-61ee2d4f-c9d5-4c78-8450-2fd5b484249d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 897.170339] env[62740]: DEBUG oslo_vmware.api [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Waiting for the task: (returnval){
[ 897.170339] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52f0bd32-e7b4-c582-ff36-fa7b67cf2b1f"
[ 897.170339] env[62740]: _type = "Task"
[ 897.170339] env[62740]: } to complete.
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.184986] env[62740]: DEBUG oslo_vmware.api [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52f0bd32-e7b4-c582-ff36-fa7b67cf2b1f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.682193] env[62740]: DEBUG oslo_concurrency.lockutils [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 897.682193] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 897.682193] env[62740]: DEBUG oslo_concurrency.lockutils [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 898.570243] env[62740]: DEBUG nova.compute.manager [req-61370c47-3f61-4400-810c-16e422e4a422 req-01051fb7-319b-479c-b844-884d77214ef2 service nova] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Received event network-changed-488ed98b-f8df-4275-a42c-c0365dba7070 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 898.570603] env[62740]: DEBUG nova.compute.manager [req-61370c47-3f61-4400-810c-16e422e4a422 req-01051fb7-319b-479c-b844-884d77214ef2 service nova] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Refreshing instance network info cache due to event network-changed-488ed98b-f8df-4275-a42c-c0365dba7070. 
{{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 898.571203] env[62740]: DEBUG oslo_concurrency.lockutils [req-61370c47-3f61-4400-810c-16e422e4a422 req-01051fb7-319b-479c-b844-884d77214ef2 service nova] Acquiring lock "refresh_cache-f22357ec-450c-4545-8822-74b83bfc5a35" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 898.571385] env[62740]: DEBUG oslo_concurrency.lockutils [req-61370c47-3f61-4400-810c-16e422e4a422 req-01051fb7-319b-479c-b844-884d77214ef2 service nova] Acquired lock "refresh_cache-f22357ec-450c-4545-8822-74b83bfc5a35" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.571557] env[62740]: DEBUG nova.network.neutron [req-61370c47-3f61-4400-810c-16e422e4a422 req-01051fb7-319b-479c-b844-884d77214ef2 service nova] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Refreshing network info cache for port 488ed98b-f8df-4275-a42c-c0365dba7070 {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 898.572906] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-61370c47-3f61-4400-810c-16e422e4a422 req-01051fb7-319b-479c-b844-884d77214ef2 service nova] Expecting reply to msg aa0c9433a2d34527bc3ae90191d978e9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 898.583519] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa0c9433a2d34527bc3ae90191d978e9 [ 899.163998] env[62740]: DEBUG nova.network.neutron [req-61370c47-3f61-4400-810c-16e422e4a422 req-01051fb7-319b-479c-b844-884d77214ef2 service nova] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Updated VIF entry in instance network info cache for port 488ed98b-f8df-4275-a42c-c0365dba7070. 
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 899.164379] env[62740]: DEBUG nova.network.neutron [req-61370c47-3f61-4400-810c-16e422e4a422 req-01051fb7-319b-479c-b844-884d77214ef2 service nova] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Updating instance_info_cache with network_info: [{"id": "488ed98b-f8df-4275-a42c-c0365dba7070", "address": "fa:16:3e:ce:32:94", "network": {"id": "970cd08f-3b66-450c-b224-58ccf47a76cc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1787034591-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c685a9b7969f494a9f1ea727bb2ff492", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11da2092-76f7-447e-babb-8fc14ad39a71", "external-id": "nsx-vlan-transportzone-585", "segmentation_id": 585, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap488ed98b-f8", "ovs_interfaceid": "488ed98b-f8df-4275-a42c-c0365dba7070", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 899.164895] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-61370c47-3f61-4400-810c-16e422e4a422 req-01051fb7-319b-479c-b844-884d77214ef2 service nova] Expecting reply to msg b2cb7acc2c2f4290b969cacd5f7634f8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 899.177297] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b2cb7acc2c2f4290b969cacd5f7634f8 [ 899.177937] env[62740]: DEBUG oslo_concurrency.lockutils [req-61370c47-3f61-4400-810c-16e422e4a422 req-01051fb7-319b-479c-b844-884d77214ef2 service nova] Releasing lock "refresh_cache-f22357ec-450c-4545-8822-74b83bfc5a35" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 901.839936] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Acquiring lock "158406db-7196-4826-aefa-20a58daa186b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.840423] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Lock "158406db-7196-4826-aefa-20a58daa186b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 902.572059] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5402c50a-9534-4b4b-94d3-7968411f783d tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Expecting reply to msg c36fb52883944dfe91b442493a773f95 in queue 
reply_30cb6e3d754a4ebf9cedab7950709402 [ 902.595312] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c36fb52883944dfe91b442493a773f95 [ 902.595841] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5402c50a-9534-4b4b-94d3-7968411f783d tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Acquiring lock "697e3884-2ef4-423e-af81-e5d1e94f65a2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 908.485217] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Acquiring lock "cf00af51-2b31-4b99-a692-8b0851dd74b8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 908.485829] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Lock "cf00af51-2b31-4b99-a692-8b0851dd74b8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.588649] env[62740]: WARNING oslo_vmware.rw_handles [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 926.588649] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 926.588649] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 926.588649] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 926.588649] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 926.588649] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 926.588649] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 926.588649] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 926.588649] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 926.588649] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 926.588649] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 926.588649] env[62740]: ERROR oslo_vmware.rw_handles [ 926.589244] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/0a19e8b9-0bfb-4d8c-86b9-780d521c9f1a/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 
{{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 926.591327] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 926.591593] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Copying Virtual Disk [datastore2] vmware_temp/0a19e8b9-0bfb-4d8c-86b9-780d521c9f1a/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore2] vmware_temp/0a19e8b9-0bfb-4d8c-86b9-780d521c9f1a/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 926.591899] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0df0cad5-9b20-4ba4-af00-b07735f94ce2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.599751] env[62740]: DEBUG oslo_vmware.api [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Waiting for the task: (returnval){ [ 926.599751] env[62740]: value = "task-640150" [ 926.599751] env[62740]: _type = "Task" [ 926.599751] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.607451] env[62740]: DEBUG oslo_vmware.api [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Task: {'id': task-640150, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.110044] env[62740]: DEBUG oslo_vmware.exceptions [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Fault InvalidArgument not matched. 
{{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 927.110345] env[62740]: DEBUG oslo_concurrency.lockutils [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 927.110903] env[62740]: ERROR nova.compute.manager [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 927.110903] env[62740]: Faults: ['InvalidArgument'] [ 927.110903] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Traceback (most recent call last): [ 927.110903] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 927.110903] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] yield resources [ 927.110903] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 927.110903] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] self.driver.spawn(context, instance, image_meta, [ 927.110903] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 927.110903] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] self._vmops.spawn(context, instance, image_meta, injected_files, [ 927.110903] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 927.110903] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] self._fetch_image_if_missing(context, vi) [ 927.110903] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 927.111317] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] image_cache(vi, tmp_image_ds_loc) [ 927.111317] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 927.111317] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] vm_util.copy_virtual_disk( [ 927.111317] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 927.111317] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] session._wait_for_task(vmdk_copy_task) [ 927.111317] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 
157, in _wait_for_task [ 927.111317] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] return self.wait_for_task(task_ref) [ 927.111317] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 927.111317] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] return evt.wait() [ 927.111317] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 927.111317] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] result = hub.switch() [ 927.111317] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 927.111317] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] return self.greenlet.switch() [ 927.111729] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 927.111729] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] self.f(*self.args, **self.kw) [ 927.111729] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 927.111729] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] raise exceptions.translate_fault(task_info.error) [ 927.111729] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 927.111729] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Faults: ['InvalidArgument'] [ 927.111729] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] [ 927.111729] env[62740]: INFO nova.compute.manager [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Terminating instance [ 927.112812] env[62740]: DEBUG oslo_concurrency.lockutils [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.113086] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 927.113270] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0162ed92-2efb-4433-b823-dfb33b39c878 {{(pid=62740) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.115891] env[62740]: DEBUG nova.compute.manager [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 927.116099] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 927.116825] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43eef4e4-c2e4-4702-96c3-9cd701fdffc1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.123501] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 927.123617] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2d463510-284f-4252-a2d8-17298bef9f5c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.125730] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 927.125903] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 927.126857] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35382d5c-e694-49cd-983f-ffc5948c0913 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.131281] env[62740]: DEBUG oslo_vmware.api [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Waiting for the task: (returnval){ [ 927.131281] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]520379c1-71b2-97c8-9e69-3f7bb708dace" [ 927.131281] env[62740]: _type = "Task" [ 927.131281] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 927.138312] env[62740]: DEBUG oslo_vmware.api [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]520379c1-71b2-97c8-9e69-3f7bb708dace, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 927.183839] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 927.184198] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 927.184504] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Deleting the datastore file [datastore2] 4f0d1356-bdfb-4cb2-979a-e28f9025b311 {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 927.184815] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-665009ee-7e8e-4888-905a-1e824b78e30d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 927.192042] env[62740]: DEBUG oslo_vmware.api [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Waiting for the task: (returnval){
[ 927.192042] env[62740]: value = "task-640152"
[ 927.192042] env[62740]: _type = "Task"
[ 927.192042] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 927.201277] env[62740]: DEBUG oslo_vmware.api [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Task: {'id': task-640152, 'name': DeleteDatastoreFile_Task} progress is 0%.
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.641319] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 927.641603] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Creating directory with path [datastore2] vmware_temp/6485efd3-d5b6-4f25-8997-450bdea6fa1e/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 927.641839] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c81cba1a-945f-4c25-9864-e3b3043987f5 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.654602] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Created directory with path [datastore2] vmware_temp/6485efd3-d5b6-4f25-8997-450bdea6fa1e/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 927.654800] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Fetch image to [datastore2] vmware_temp/6485efd3-d5b6-4f25-8997-450bdea6fa1e/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 927.654973] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/6485efd3-d5b6-4f25-8997-450bdea6fa1e/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 927.655722] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59fe21b1-55fc-4ae1-b572-b531755e0ebe {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.662299] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6e39b9e-fe75-49ab-a11f-dcf8989c5db4 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.671120] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0b979cb-0adf-4a48-b9ec-87b5433cea8a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.703025] env[62740]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1d0eba6-3572-408c-8db7-48e36197e988 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.709466] env[62740]: DEBUG oslo_vmware.api [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Task: {'id': task-640152, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080476} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.710837] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 927.711038] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 927.711221] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 927.711399] env[62740]: INFO nova.compute.manager [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Took 0.60 seconds to destroy the instance on the hypervisor. 
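The DeleteDatastoreFile_Task lines above show the request/poll cycle oslo.vmware uses for every vCenter task: submit the call, get a task reference back, then poll it (wait_for_task -> _poll_task) until it reaches a terminal state, recording duration_secs on completion. A minimal sketch of that loop, assuming a hypothetical `get_task_info` helper on the session object rather than the real oslo.vmware internals:

```python
# Minimal sketch of the poll-until-terminal pattern visible in the log
# (wait_for_task -> _poll_task). `session.get_task_info` and the string
# states are hypothetical stand-ins, not the actual oslo.vmware API.
import time


class TaskFailed(Exception):
    """Raised when the remote task ends in an error state."""


def wait_for_task(session, task_ref, poll_interval=0.5):
    start = time.monotonic()
    while True:
        info = session.get_task_info(task_ref)  # one property-collector round trip
        if info.state == "success":
            # Matches "completed successfully" with 'duration_secs': 0.080476
            return {"duration_secs": time.monotonic() - start, "result": info.result}
        if info.state == "error":
            raise TaskFailed(info.error)
        # Matches the repeated "Task: {...} progress is 0%." entries
        time.sleep(poll_interval)
```

In the real service each poll loop runs inside a looping call on a green thread, which is why unrelated operations (the SearchDatastore_Task poll for instance 75050b95, the image fetch for 174f7655) interleave freely between polls.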
[ 927.713144] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7c9eb733-03f4-4361-9769-724f335e01b7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.715364] env[62740]: DEBUG nova.compute.claims [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 927.715924] env[62740]: DEBUG oslo_concurrency.lockutils [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 927.715924] env[62740]: DEBUG oslo_concurrency.lockutils [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 927.717731] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Expecting reply to msg 1fd9352cf99543e78913de3ea37e3132 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 927.739109] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 927.759559] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1fd9352cf99543e78913de3ea37e3132 [ 927.789816] env[62740]: DEBUG oslo_vmware.rw_handles [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6485efd3-d5b6-4f25-8997-450bdea6fa1e/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 927.850483] env[62740]: DEBUG oslo_vmware.rw_handles [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Completed reading data from the image iterator. 
{{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 927.850683] env[62740]: DEBUG oslo_vmware.rw_handles [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6485efd3-d5b6-4f25-8997-450bdea6fa1e/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 928.109226] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57ec83ac-159e-4e36-8a72-dc412eea6874 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.116833] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e86e0a1e-0f85-4697-a046-85700dfdd567 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.147694] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61f07924-49af-4c41-ad6e-d0278abb42da {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.155020] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2aa241c-0137-481e-b963-2297171dcdd9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.168875] env[62740]: DEBUG nova.compute.provider_tree [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 928.169380] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Expecting reply to msg f15988f43baf4d05b0383c76e3dd7ac8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 928.176755] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f15988f43baf4d05b0383c76e3dd7ac8 [ 928.177816] env[62740]: DEBUG nova.scheduler.client.report [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 928.180176] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Expecting reply to 
msg 7d87ce78c5324b5abdc23d522686dfb9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 928.198141] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7d87ce78c5324b5abdc23d522686dfb9 [ 928.199061] env[62740]: DEBUG oslo_concurrency.lockutils [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.483s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.199609] env[62740]: ERROR nova.compute.manager [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 928.199609] env[62740]: Faults: ['InvalidArgument'] [ 928.199609] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Traceback (most recent call last): [ 928.199609] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 928.199609] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] self.driver.spawn(context, instance, image_meta, [ 928.199609] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 928.199609] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] self._vmops.spawn(context, instance, image_meta, injected_files, [ 928.199609] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 928.199609] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] self._fetch_image_if_missing(context, vi) [ 928.199609] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 928.199609] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] image_cache(vi, tmp_image_ds_loc) [ 928.199609] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 928.199994] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] vm_util.copy_virtual_disk( [ 928.199994] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 928.199994] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] session._wait_for_task(vmdk_copy_task) [ 928.199994] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 928.199994] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] return self.wait_for_task(task_ref) [ 928.199994] env[62740]: ERROR 
nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 928.199994] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] return evt.wait() [ 928.199994] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 928.199994] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] result = hub.switch() [ 928.199994] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 928.199994] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] return self.greenlet.switch() [ 928.199994] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 928.199994] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] self.f(*self.args, **self.kw) [ 928.200302] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 928.200302] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] raise exceptions.translate_fault(task_info.error) [ 928.200302] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 928.200302] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Faults: ['InvalidArgument'] [ 928.200302] env[62740]: ERROR nova.compute.manager [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] [ 928.200435] env[62740]: DEBUG nova.compute.utils [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 928.201840] env[62740]: DEBUG nova.compute.manager [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Build of instance 4f0d1356-bdfb-4cb2-979a-e28f9025b311 was re-scheduled: A specified parameter was not correct: fileType [ 928.201840] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 928.202235] env[62740]: DEBUG nova.compute.manager [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 928.202409] env[62740]: DEBUG nova.compute.manager [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Virt driver does not provide unplug_vifs 
method, so it is not possible determine if VIFs should be unplugged. {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 928.202582] env[62740]: DEBUG nova.compute.manager [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 928.202746] env[62740]: DEBUG nova.network.neutron [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 928.599168] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Expecting reply to msg c953d93d67e947a9ada8f0ff91a7679d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 928.620091] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c953d93d67e947a9ada8f0ff91a7679d [ 928.620091] env[62740]: DEBUG nova.network.neutron [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.621173] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Expecting reply to msg ce0d390b5b3141a8a78aeffc84c2b04e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 928.633834] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ce0d390b5b3141a8a78aeffc84c2b04e [ 928.634488] env[62740]: INFO nova.compute.manager [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Took 0.43 seconds to deallocate network for instance. 
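The traceback above captures the whole failure path in one place: the disk copy task fails with VimFaultException ("A specified parameter was not correct: fileType"), the fault propagates out of wait_for_task into driver.spawn, and _do_build_and_run_instance responds by aborting the resource claim, skipping VIF unplugging (the driver provides no unplug_vifs method), deallocating the network, and handing the instance back to the scheduler. A compressed, hypothetical sketch of that control flow; the names are simplified stand-ins, not the exact nova.compute.manager internals:

```python
# Hedged sketch of the catch -> abort claim -> reschedule flow shown in the
# traceback above. All names are simplified stand-ins for nova internals.
class RescheduledException(Exception):
    """Signals that the build should be retried on another host."""


def build_and_run_instance(driver, claim, network_api, instance):
    try:
        driver.spawn(instance)  # VimFaultException surfaces here via wait_for_task
    except Exception as exc:
        claim.abort()  # the "Aborting claim" / compute_resources lock lines
        # The VMware driver has no unplug_vifs method, so that step is
        # skipped before the network is deallocated, exactly as logged.
        network_api.deallocate_for_instance(instance)
        raise RescheduledException(str(exc)) from exc
```

Note that the instance record outlives the failed build: the terminate_instance call for 4f0d1356 later in this section finds nothing on the backend (InstanceNotFound) because the hypervisor-side artifacts were already destroyed here.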
[ 928.636392] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Expecting reply to msg 1093ecd3e32e4b4f9d0e67b2befcaf17 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 928.688395] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1093ecd3e32e4b4f9d0e67b2befcaf17 [ 928.691103] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Expecting reply to msg 89d5c383a12441a580204ff65b2b83ce in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 928.724952] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 89d5c383a12441a580204ff65b2b83ce [ 928.760977] env[62740]: INFO nova.scheduler.client.report [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Deleted allocations for instance 4f0d1356-bdfb-4cb2-979a-e28f9025b311 [ 928.769077] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Expecting reply to msg 5c39c27f519f4244a4a421085bbd7dda in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 928.784024] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c39c27f519f4244a4a421085bbd7dda [ 928.784623] env[62740]: DEBUG oslo_concurrency.lockutils [None req-389898eb-848e-44b8-949c-3460f2a1990b tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Lock "4f0d1356-bdfb-4cb2-979a-e28f9025b311" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 332.952s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.785227] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Expecting reply to msg e57ffb31472345a3ad0dcfe4f54a02c1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 928.786390] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e41ba793-fbea-410e-856b-eaeff48b7825 tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Lock "4f0d1356-bdfb-4cb2-979a-e28f9025b311" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 131.582s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.786721] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e41ba793-fbea-410e-856b-eaeff48b7825 tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Acquiring lock "4f0d1356-bdfb-4cb2-979a-e28f9025b311-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.786943] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e41ba793-fbea-410e-856b-eaeff48b7825 tempest-AttachInterfacesV270Test-942908702
tempest-AttachInterfacesV270Test-942908702-project-member] Lock "4f0d1356-bdfb-4cb2-979a-e28f9025b311-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.787132] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e41ba793-fbea-410e-856b-eaeff48b7825 tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Lock "4f0d1356-bdfb-4cb2-979a-e28f9025b311-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.789159] env[62740]: INFO nova.compute.manager [None req-e41ba793-fbea-410e-856b-eaeff48b7825 tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Terminating instance [ 928.791057] env[62740]: DEBUG nova.compute.manager [None req-e41ba793-fbea-410e-856b-eaeff48b7825 tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 928.791419] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-e41ba793-fbea-410e-856b-eaeff48b7825 tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 928.793394] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f668d7af-c956-43e0-bafd-829c95083c68 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.799521] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e57ffb31472345a3ad0dcfe4f54a02c1 [ 928.800027] env[62740]: DEBUG nova.compute.manager [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Starting instance... 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 928.801705] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Expecting reply to msg ef49d066fe004043812ff3ecb55d2fd3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 928.806053] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b60e2321-7a5c-418f-acc6-6db161d8505b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.834861] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-e41ba793-fbea-410e-856b-eaeff48b7825 tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4f0d1356-bdfb-4cb2-979a-e28f9025b311 could not be found. [ 928.835075] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-e41ba793-fbea-410e-856b-eaeff48b7825 tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 928.836182] env[62740]: INFO nova.compute.manager [None req-e41ba793-fbea-410e-856b-eaeff48b7825 tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Took 0.04 seconds to destroy the instance on the hypervisor. [ 928.836182] env[62740]: DEBUG oslo.service.loopingcall [None req-e41ba793-fbea-410e-856b-eaeff48b7825 tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 928.836182] env[62740]: DEBUG nova.compute.manager [-] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 928.836182] env[62740]: DEBUG nova.network.neutron [-] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 928.842843] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ef49d066fe004043812ff3ecb55d2fd3 [ 928.858734] env[62740]: DEBUG oslo_concurrency.lockutils [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.858968] env[62740]: DEBUG oslo_concurrency.lockutils [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.860486] env[62740]: INFO nova.compute.claims [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 928.862271] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Expecting reply to msg a73de079c74d4db581527176a0863d78 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 928.864879] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 7ac7b8ffe3d54ae3bd255236ad2c17e5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 928.871375] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ac7b8ffe3d54ae3bd255236ad2c17e5 [ 928.871722] env[62740]: DEBUG nova.network.neutron [-] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.872326] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg b64e4cd9766a4dc08307eb5d40eeb07e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 928.881112] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b64e4cd9766a4dc08307eb5d40eeb07e [ 928.881730] env[62740]: INFO nova.compute.manager [-] [instance: 4f0d1356-bdfb-4cb2-979a-e28f9025b311] Took 0.05 seconds to deallocate network for instance. 
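Every "Acquiring lock ... waited ... held" triple in this log comes from oslo.concurrency's lockutils wrapper (the `inner` frames in lockutils.py), which serializes callers on a named in-process lock and logs how long each caller waited for and then held it. A minimal runnable sketch of the pattern guarding "compute_resources" above; lockutils.synchronized is the real oslo.concurrency API, but ResourceTracker here is a toy class, not the real nova one:

```python
# Sketch of the named-lock pattern behind the "compute_resources" log lines.
from oslo_concurrency import lockutils


class ResourceTracker:
    @lockutils.synchronized("compute_resources")
    def instance_claim(self, instance_uuid, vcpus, memory_mb):
        # Only one thread at a time mutates tracked resources, which is why
        # claims and aborts appear strictly serialized in the log.
        print(f"claiming {vcpus} vCPU / {memory_mb} MB for {instance_uuid}")

    @lockutils.synchronized("compute_resources")
    def abort_instance_claim(self, instance_uuid):
        print(f"aborting claim for {instance_uuid}")
```

Because the lock is shared by name across the whole process, the 0.424s hold for the 732da1c8 claim below directly delays any concurrent claim or abort on the same compute host.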
[ 928.885237] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e41ba793-fbea-410e-856b-eaeff48b7825 tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Expecting reply to msg 5d57256954d04b52aa66706354a4a2c3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 928.903314] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a73de079c74d4db581527176a0863d78 [ 928.904656] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Expecting reply to msg 57326bc76fc64d5bbee2c23dac429ef1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 928.911420] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 57326bc76fc64d5bbee2c23dac429ef1 [ 928.923260] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5d57256954d04b52aa66706354a4a2c3 [ 928.937281] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e41ba793-fbea-410e-856b-eaeff48b7825 tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Expecting reply to msg 0bdfedae00094f66b19e92b5d2364723 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 928.973674] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0bdfedae00094f66b19e92b5d2364723 [ 928.976461] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e41ba793-fbea-410e-856b-eaeff48b7825 tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Lock "4f0d1356-bdfb-4cb2-979a-e28f9025b311" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.190s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.976804] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e41ba793-fbea-410e-856b-eaeff48b7825 tempest-AttachInterfacesV270Test-942908702 tempest-AttachInterfacesV270Test-942908702-project-member] Expecting reply to msg 1f4415b36d774ac0a64a78507a324f03 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 928.989115] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1f4415b36d774ac0a64a78507a324f03 [ 929.191654] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-131be2c6-4952-4ef3-a5e0-785ee3adc798 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.199224] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e790995-c154-42d8-bf6b-e0c36bb060ed {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.229876] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fc40548-bfe8-4ea2-81d0-a59a3be3942c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.238971] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c50a3d6-e5c0-496c-ac84-eca3a6cb7be8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.255265] env[62740]: DEBUG 
nova.compute.provider_tree [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 929.255836] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Expecting reply to msg 29ce5362e347418ebe27cb0f26b6d9cd in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 929.264342] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 29ce5362e347418ebe27cb0f26b6d9cd [ 929.265228] env[62740]: DEBUG nova.scheduler.client.report [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 929.267741] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Expecting reply to msg c693a6473d664fc4b40de0acc6dc4560 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 929.281801] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c693a6473d664fc4b40de0acc6dc4560 [ 929.282585] env[62740]: DEBUG oslo_concurrency.lockutils [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.424s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.283066] env[62740]: DEBUG nova.compute.manager [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Start building networks asynchronously for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 929.284710] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Expecting reply to msg b92df86cb1bd49cb8f252ef29f83452f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 929.328018] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b92df86cb1bd49cb8f252ef29f83452f [ 929.328018] env[62740]: DEBUG nova.compute.utils [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 929.328441] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Expecting reply to msg fb128adad238456692aa5fc62ccc44bb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 929.329384] env[62740]: DEBUG nova.compute.manager [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 929.329683] env[62740]: DEBUG nova.network.neutron [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 929.337567] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fb128adad238456692aa5fc62ccc44bb [ 929.338299] env[62740]: DEBUG nova.compute.manager [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 929.340180] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Expecting reply to msg 861f17fced604429a4922f7f38b321c2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 929.372953] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 861f17fced604429a4922f7f38b321c2 [ 929.374025] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Expecting reply to msg c319bcfda0954acc9dc8202f1b30f2b6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 929.387045] env[62740]: DEBUG nova.policy [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e21b693bee374cfa8d6b8cfb014628b4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6960e3335e404bbd876727d953c6f6cd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 929.403661] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c319bcfda0954acc9dc8202f1b30f2b6 [ 929.404870] env[62740]: DEBUG nova.compute.manager [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Start spawning the instance on the hypervisor. 
{{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 929.436284] env[62740]: DEBUG nova.virt.hardware [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 929.436420] env[62740]: DEBUG nova.virt.hardware [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 929.436589] env[62740]: DEBUG nova.virt.hardware [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 929.436804] env[62740]: DEBUG nova.virt.hardware [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 929.436921] env[62740]: DEBUG nova.virt.hardware [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 929.437126] env[62740]: DEBUG nova.virt.hardware [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 929.437349] env[62740]: DEBUG nova.virt.hardware [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 929.437519] env[62740]: DEBUG nova.virt.hardware [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 929.437692] env[62740]: DEBUG nova.virt.hardware [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 929.437858] env[62740]: DEBUG nova.virt.hardware [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 929.438044] env[62740]: DEBUG nova.virt.hardware [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 929.439023] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b223d83-2fa6-4217-b089-c2a376420c88 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.451701] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a0a7164-6f2e-4b9f-87b6-3897f90baa94 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.857980] env[62740]: DEBUG nova.network.neutron [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Successfully created port: f8e3f17c-5bfe-4355-b2ac-7c21651a12d7 {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 930.974619] env[62740]: DEBUG nova.compute.manager [req-63abc793-1f2e-4ce1-a75f-344b40f6382c req-19a8d125-fed2-4f64-bb38-f859132f7d31 service nova] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Received event network-vif-plugged-f8e3f17c-5bfe-4355-b2ac-7c21651a12d7 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 930.974846] env[62740]: DEBUG oslo_concurrency.lockutils [req-63abc793-1f2e-4ce1-a75f-344b40f6382c req-19a8d125-fed2-4f64-bb38-f859132f7d31 service nova] Acquiring lock "732da1c8-e83e-4dd7-96c2-dbfa9468baab-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 930.975143] env[62740]: DEBUG oslo_concurrency.lockutils [req-63abc793-1f2e-4ce1-a75f-344b40f6382c req-19a8d125-fed2-4f64-bb38-f859132f7d31 service nova] Lock "732da1c8-e83e-4dd7-96c2-dbfa9468baab-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 930.975243] env[62740]: DEBUG oslo_concurrency.lockutils [req-63abc793-1f2e-4ce1-a75f-344b40f6382c req-19a8d125-fed2-4f64-bb38-f859132f7d31 service nova] Lock "732da1c8-e83e-4dd7-96c2-dbfa9468baab-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 
0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 930.975410] env[62740]: DEBUG nova.compute.manager [req-63abc793-1f2e-4ce1-a75f-344b40f6382c req-19a8d125-fed2-4f64-bb38-f859132f7d31 service nova] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] No waiting events found dispatching network-vif-plugged-f8e3f17c-5bfe-4355-b2ac-7c21651a12d7 {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 930.975741] env[62740]: WARNING nova.compute.manager [req-63abc793-1f2e-4ce1-a75f-344b40f6382c req-19a8d125-fed2-4f64-bb38-f859132f7d31 service nova] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Received unexpected event network-vif-plugged-f8e3f17c-5bfe-4355-b2ac-7c21651a12d7 for instance with vm_state building and task_state spawning. [ 931.004825] env[62740]: DEBUG nova.network.neutron [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Successfully updated port: f8e3f17c-5bfe-4355-b2ac-7c21651a12d7 {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 931.004825] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Expecting reply to msg c758a25554974a91b67042084b7a01c6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 931.015890] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c758a25554974a91b67042084b7a01c6 [ 931.016664] env[62740]: DEBUG oslo_concurrency.lockutils [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Acquiring lock "refresh_cache-732da1c8-e83e-4dd7-96c2-dbfa9468baab" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 931.016752] env[62740]: DEBUG oslo_concurrency.lockutils [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Acquired lock "refresh_cache-732da1c8-e83e-4dd7-96c2-dbfa9468baab" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.016870] env[62740]: DEBUG nova.network.neutron [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 931.017282] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Expecting reply to msg 14c28d7d2c3c4ce6ba5e60c87cd1b8b8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 931.026613] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 14c28d7d2c3c4ce6ba5e60c87cd1b8b8 [ 931.089488] env[62740]: DEBUG nova.network.neutron [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 
732da1c8-e83e-4dd7-96c2-dbfa9468baab] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 931.361647] env[62740]: DEBUG nova.network.neutron [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Updating instance_info_cache with network_info: [{"id": "f8e3f17c-5bfe-4355-b2ac-7c21651a12d7", "address": "fa:16:3e:dd:25:84", "network": {"id": "dba7b176-b5dc-4a4d-a4e7-7b575c1dbb47", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-2056579504-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6960e3335e404bbd876727d953c6f6cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ac3fd84-c373-49f5-82dc-784a6cdb686d", "external-id": "nsx-vlan-transportzone-298", "segmentation_id": 298, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8e3f17c-5b", "ovs_interfaceid": "f8e3f17c-5bfe-4355-b2ac-7c21651a12d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.362193] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Expecting reply to msg 230c5e509ab345a08d9c3dab930767fc in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 931.374917] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 230c5e509ab345a08d9c3dab930767fc [ 931.375558] env[62740]: DEBUG oslo_concurrency.lockutils [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Releasing lock "refresh_cache-732da1c8-e83e-4dd7-96c2-dbfa9468baab" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 931.375907] env[62740]: DEBUG nova.compute.manager [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Instance network_info: |[{"id": "f8e3f17c-5bfe-4355-b2ac-7c21651a12d7", "address": "fa:16:3e:dd:25:84", "network": {"id": "dba7b176-b5dc-4a4d-a4e7-7b575c1dbb47", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-2056579504-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6960e3335e404bbd876727d953c6f6cd", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ac3fd84-c373-49f5-82dc-784a6cdb686d", "external-id": "nsx-vlan-transportzone-298", "segmentation_id": 298, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8e3f17c-5b", "ovs_interfaceid": "f8e3f17c-5bfe-4355-b2ac-7c21651a12d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 931.376309] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dd:25:84', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3ac3fd84-c373-49f5-82dc-784a6cdb686d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f8e3f17c-5bfe-4355-b2ac-7c21651a12d7', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 931.383870] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Creating folder: Project (6960e3335e404bbd876727d953c6f6cd). Parent ref: group-v156037. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 931.384425] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e67c2ebc-55fe-437d-a44a-8bb849327f46 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.394406] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Created folder: Project (6960e3335e404bbd876727d953c6f6cd) in parent group-v156037. [ 931.394597] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Creating folder: Instances. Parent ref: group-v156107. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 931.394811] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-078601b0-d1bf-4395-b7a8-7e72d614e801 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.403364] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Created folder: Instances in parent group-v156107. [ 931.403682] env[62740]: DEBUG oslo.service.loopingcall [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 931.403899] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 931.404141] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aa48624b-9d5f-4144-bb87-2dc0a939c11e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.424867] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 931.424867] env[62740]: value = "task-640155" [ 931.424867] env[62740]: _type = "Task" [ 931.424867] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.432426] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640155, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.939506] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640155, 'name': CreateVM_Task, 'duration_secs': 0.287783} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.939711] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 931.940404] env[62740]: DEBUG oslo_concurrency.lockutils [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 931.940574] env[62740]: DEBUG oslo_concurrency.lockutils [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Acquired lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.940877] env[62740]: DEBUG oslo_concurrency.lockutils [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 931.941149] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9805728-2b61-433e-9df8-2d6d9be503ea {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.946149] env[62740]: DEBUG oslo_vmware.api [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Waiting for the task: (returnval){ [ 931.946149] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52deb99a-1178-1587-69bb-cdb75e1c7fd2" [ 931.946149] env[62740]: _type = "Task" [ 931.946149] env[62740]: } 
to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.954564] env[62740]: DEBUG oslo_vmware.api [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52deb99a-1178-1587-69bb-cdb75e1c7fd2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.458623] env[62740]: DEBUG oslo_concurrency.lockutils [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Releasing lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 932.458916] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 932.459113] env[62740]: DEBUG oslo_concurrency.lockutils [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 933.111319] env[62740]: DEBUG nova.compute.manager [req-9110df78-8fad-4b2f-aa19-d6afb65dca0d req-1d581a3e-2709-4ecb-9921-2f4f7fd0b3c0 service nova] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Received event network-changed-f8e3f17c-5bfe-4355-b2ac-7c21651a12d7 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 933.111319] env[62740]: DEBUG nova.compute.manager [req-9110df78-8fad-4b2f-aa19-d6afb65dca0d req-1d581a3e-2709-4ecb-9921-2f4f7fd0b3c0 service nova] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Refreshing instance network info cache due to event network-changed-f8e3f17c-5bfe-4355-b2ac-7c21651a12d7. 
{{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 933.111319] env[62740]: DEBUG oslo_concurrency.lockutils [req-9110df78-8fad-4b2f-aa19-d6afb65dca0d req-1d581a3e-2709-4ecb-9921-2f4f7fd0b3c0 service nova] Acquiring lock "refresh_cache-732da1c8-e83e-4dd7-96c2-dbfa9468baab" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 933.111319] env[62740]: DEBUG oslo_concurrency.lockutils [req-9110df78-8fad-4b2f-aa19-d6afb65dca0d req-1d581a3e-2709-4ecb-9921-2f4f7fd0b3c0 service nova] Acquired lock "refresh_cache-732da1c8-e83e-4dd7-96c2-dbfa9468baab" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.111319] env[62740]: DEBUG nova.network.neutron [req-9110df78-8fad-4b2f-aa19-d6afb65dca0d req-1d581a3e-2709-4ecb-9921-2f4f7fd0b3c0 service nova] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Refreshing network info cache for port f8e3f17c-5bfe-4355-b2ac-7c21651a12d7 {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 933.112290] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-9110df78-8fad-4b2f-aa19-d6afb65dca0d req-1d581a3e-2709-4ecb-9921-2f4f7fd0b3c0 service nova] Expecting reply to msg 7e77623d73d34862ac0c44d65365b326 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 933.119893] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7e77623d73d34862ac0c44d65365b326 [ 933.742627] env[62740]: DEBUG nova.network.neutron [req-9110df78-8fad-4b2f-aa19-d6afb65dca0d req-1d581a3e-2709-4ecb-9921-2f4f7fd0b3c0 service nova] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Updated VIF entry in instance network info cache for port f8e3f17c-5bfe-4355-b2ac-7c21651a12d7. 
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 933.743465] env[62740]: DEBUG nova.network.neutron [req-9110df78-8fad-4b2f-aa19-d6afb65dca0d req-1d581a3e-2709-4ecb-9921-2f4f7fd0b3c0 service nova] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Updating instance_info_cache with network_info: [{"id": "f8e3f17c-5bfe-4355-b2ac-7c21651a12d7", "address": "fa:16:3e:dd:25:84", "network": {"id": "dba7b176-b5dc-4a4d-a4e7-7b575c1dbb47", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-2056579504-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6960e3335e404bbd876727d953c6f6cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ac3fd84-c373-49f5-82dc-784a6cdb686d", "external-id": "nsx-vlan-transportzone-298", "segmentation_id": 298, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8e3f17c-5b", "ovs_interfaceid": "f8e3f17c-5bfe-4355-b2ac-7c21651a12d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 933.743617] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-9110df78-8fad-4b2f-aa19-d6afb65dca0d req-1d581a3e-2709-4ecb-9921-2f4f7fd0b3c0 service nova] Expecting reply to msg a5435430fa6a47dbbbb8ef411df86507 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 933.753038] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a5435430fa6a47dbbbb8ef411df86507 [ 933.753680] env[62740]: DEBUG oslo_concurrency.lockutils [req-9110df78-8fad-4b2f-aa19-d6afb65dca0d req-1d581a3e-2709-4ecb-9921-2f4f7fd0b3c0 service nova] Releasing lock "refresh_cache-732da1c8-e83e-4dd7-96c2-dbfa9468baab" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 938.323604] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Acquiring lock "6ca702af-1a5c-40bb-b6c7-2f55ca308c02" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 938.323877] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Lock "6ca702af-1a5c-40bb-b6c7-2f55ca308c02" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 938.724705] env[62740]: WARNING oslo_vmware.rw_handles [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Error occurred while reading the HTTP response.: 
http.client.RemoteDisconnected: Remote end closed connection without response [ 938.724705] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 938.724705] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 938.724705] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 938.724705] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 938.724705] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 938.724705] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 938.724705] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 938.724705] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 938.724705] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 938.724705] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 938.724705] env[62740]: ERROR oslo_vmware.rw_handles [ 938.725098] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/a6119ab2-1bde-4c55-baaa-b7c9448a7198/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore1 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 938.726996] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 938.727252] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Copying Virtual Disk [datastore1] vmware_temp/a6119ab2-1bde-4c55-baaa-b7c9448a7198/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore1] vmware_temp/a6119ab2-1bde-4c55-baaa-b7c9448a7198/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 938.727558] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b247fa74-7b28-4823-9eef-11d541b35238 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.735470] env[62740]: DEBUG oslo_vmware.api [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Waiting for the task: (returnval){ [ 938.735470] env[62740]: value = "task-640156" [ 938.735470] env[62740]: _type = "Task" [ 938.735470] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.743190] env[62740]: DEBUG oslo_vmware.api [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Task: {'id': task-640156, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.245954] env[62740]: DEBUG oslo_vmware.exceptions [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Fault InvalidArgument not matched. {{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 939.246325] env[62740]: DEBUG oslo_concurrency.lockutils [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Releasing lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 939.246917] env[62740]: ERROR nova.compute.manager [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 939.246917] env[62740]: Faults: ['InvalidArgument'] [ 939.246917] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Traceback (most recent call last): [ 939.246917] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 939.246917] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] yield resources [ 939.246917] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 939.246917] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] self.driver.spawn(context, instance, image_meta, [ 939.246917] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 939.246917] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] self._vmops.spawn(context, instance, image_meta, injected_files, [ 939.246917] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 939.246917] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] self._fetch_image_if_missing(context, vi) [ 939.246917] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 939.247243] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] image_cache(vi, tmp_image_ds_loc) [ 939.247243] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 939.247243] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] vm_util.copy_virtual_disk( [ 939.247243] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 939.247243] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] session._wait_for_task(vmdk_copy_task) [ 939.247243] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 939.247243] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] return self.wait_for_task(task_ref) [ 939.247243] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 939.247243] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] return evt.wait() [ 939.247243] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 939.247243] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] result = hub.switch() [ 939.247243] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 939.247243] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] return self.greenlet.switch() [ 939.247578] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 939.247578] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] self.f(*self.args, **self.kw) [ 939.247578] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 939.247578] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] raise exceptions.translate_fault(task_info.error) [ 939.247578] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 939.247578] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Faults: ['InvalidArgument'] [ 939.247578] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] [ 939.247578] env[62740]: INFO nova.compute.manager [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Terminating instance [ 939.248887] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.249105] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 939.249804] env[62740]: DEBUG nova.compute.manager [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 939.249987] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 939.250237] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-77a06b06-8710-4156-9b5a-1b760e14ba2a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.252706] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc3b6a5e-c116-4092-b2d3-6117af89d439 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.259830] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 939.260099] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cf9f7a4d-89aa-42eb-acbe-264b77ec4858 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.262537] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 939.262718] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 939.263791] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8dcdc11-0397-4f47-b61f-2fc4fe9de28e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.268875] env[62740]: DEBUG oslo_vmware.api [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Waiting for the task: (returnval){ [ 939.268875] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52450c37-b3de-0c94-ae10-d2901737a3cf" [ 939.268875] env[62740]: _type = "Task" [ 939.268875] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.277979] env[62740]: DEBUG oslo_vmware.api [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52450c37-b3de-0c94-ae10-d2901737a3cf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.336038] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 939.336540] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Deleting contents of the VM from datastore datastore1 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 939.336837] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Deleting the datastore file [datastore1] 6ec38a6c-f4b2-42ce-b371-5fe82d577545 {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 939.337131] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a407ea98-7960-4990-971c-12558d792a9a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.343954] env[62740]: DEBUG oslo_vmware.api [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Waiting for the task: (returnval){ [ 939.343954] env[62740]: value = "task-640158" [ 939.343954] env[62740]: _type = "Task" [ 939.343954] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.352045] env[62740]: DEBUG oslo_vmware.api [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Task: {'id': task-640158, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.779501] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 939.779771] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Creating directory with path [datastore1] vmware_temp/7a6afe2b-c90b-417b-b977-8c542d8eddae/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 939.780021] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-097a360c-ad80-4c50-8ade-01d7f7854c02 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.791433] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Created directory with path [datastore1] vmware_temp/7a6afe2b-c90b-417b-b977-8c542d8eddae/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 939.791630] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Fetch image to [datastore1] vmware_temp/7a6afe2b-c90b-417b-b977-8c542d8eddae/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 939.791803] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore1] vmware_temp/7a6afe2b-c90b-417b-b977-8c542d8eddae/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore1 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 939.792596] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-affb2d77-02c8-459a-a21c-99c89955f3b6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.800891] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ddfa348-59f9-4228-8c43-d9d564600d71 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.528947] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-459c6c65-a451-4655-8923-5aca92064b52 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.538459] env[62740]: DEBUG oslo_vmware.api [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 
tempest-ServersAdminTestJSON-1870870725-project-member] Task: {'id': task-640158, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.070755} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.563581] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 940.563749] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Deleted contents of the VM from datastore datastore1 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 940.563928] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 940.564123] env[62740]: INFO nova.compute.manager [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Took 1.31 seconds to destroy the instance on the hypervisor. [ 940.566283] env[62740]: DEBUG nova.compute.claims [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 940.566452] env[62740]: DEBUG oslo_concurrency.lockutils [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.566669] env[62740]: DEBUG oslo_concurrency.lockutils [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.568594] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Expecting reply to msg a58a87ce1fac4ccd8b8d5b1bfd245a41 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 940.570113] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0415f92-b0d1-408d-80d3-4b9b83024691 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.576857] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ec393e66-e3c3-4fa1-a32b-a757beb4dbf5 {{(pid=62740) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.600803] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore1 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 940.612699] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a58a87ce1fac4ccd8b8d5b1bfd245a41 [ 940.664634] env[62740]: DEBUG oslo_vmware.rw_handles [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7a6afe2b-c90b-417b-b977-8c542d8eddae/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 940.730588] env[62740]: DEBUG oslo_vmware.rw_handles [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Completed reading data from the image iterator. {{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 940.730807] env[62740]: DEBUG oslo_vmware.rw_handles [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7a6afe2b-c90b-417b-b977-8c542d8eddae/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 941.001711] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4a43732-0c90-4649-9d0c-d2275049304d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.009098] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee94c280-3623-4ec8-aa67-ceff91efdd5b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.039765] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29d87942-4d7f-4ecd-8e93-0d3c6301d213 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.046850] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ce03e60-a8bd-482a-a157-875cd75fbbde {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.059514] env[62740]: DEBUG nova.compute.provider_tree [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 941.059998] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Expecting reply to msg 635a8cac131246ffa8a0dd01f09fc4f0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 941.067719] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 635a8cac131246ffa8a0dd01f09fc4f0 [ 941.068603] env[62740]: DEBUG nova.scheduler.client.report [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 941.071034] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Expecting reply to msg 46965efa19c140549c8f7279e389f959 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 941.085193] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 46965efa19c140549c8f7279e389f959 [ 941.085942] env[62740]: DEBUG oslo_concurrency.lockutils [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.519s {{(pid=62740) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.086482] env[62740]: ERROR nova.compute.manager [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 941.086482] env[62740]: Faults: ['InvalidArgument'] [ 941.086482] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Traceback (most recent call last): [ 941.086482] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 941.086482] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] self.driver.spawn(context, instance, image_meta, [ 941.086482] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 941.086482] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] self._vmops.spawn(context, instance, image_meta, injected_files, [ 941.086482] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 941.086482] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] self._fetch_image_if_missing(context, vi) [ 941.086482] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 941.086482] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] image_cache(vi, tmp_image_ds_loc) [ 941.086482] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 941.086849] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] vm_util.copy_virtual_disk( [ 941.086849] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 941.086849] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] session._wait_for_task(vmdk_copy_task) [ 941.086849] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 941.086849] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] return self.wait_for_task(task_ref) [ 941.086849] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 941.086849] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] return evt.wait() [ 941.086849] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 941.086849] env[62740]: ERROR nova.compute.manager [instance: 
6ec38a6c-f4b2-42ce-b371-5fe82d577545] result = hub.switch() [ 941.086849] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 941.086849] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] return self.greenlet.switch() [ 941.086849] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 941.086849] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] self.f(*self.args, **self.kw) [ 941.087208] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 941.087208] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] raise exceptions.translate_fault(task_info.error) [ 941.087208] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 941.087208] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Faults: ['InvalidArgument'] [ 941.087208] env[62740]: ERROR nova.compute.manager [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] [ 941.087208] env[62740]: DEBUG nova.compute.utils [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 941.088688] env[62740]: DEBUG nova.compute.manager [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Build of instance 6ec38a6c-f4b2-42ce-b371-5fe82d577545 was re-scheduled: A specified parameter was not correct: fileType [ 941.088688] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 941.089071] env[62740]: DEBUG nova.compute.manager [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 941.089302] env[62740]: DEBUG nova.compute.manager [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 941.089506] env[62740]: DEBUG nova.compute.manager [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 941.089673] env[62740]: DEBUG nova.network.neutron [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 941.418308] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Expecting reply to msg cd1287a108bc4140a0da2a2db95b6b07 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 941.428727] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd1287a108bc4140a0da2a2db95b6b07 [ 941.429830] env[62740]: DEBUG nova.network.neutron [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.429903] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Expecting reply to msg 956f88083423423ab5d822e48d9ea025 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 941.443090] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 956f88083423423ab5d822e48d9ea025 [ 941.443694] env[62740]: INFO nova.compute.manager [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Took 0.35 seconds to deallocate network for instance. 
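The "Acquiring lock ... by ..." / "acquired ... waited" / "released ... held" DEBUG lines that bracket the build above are emitted by oslo.concurrency's lock helpers wrapping Nova's per-instance build path. A minimal sketch of that pattern, assuming only the public oslo_concurrency API (the lock name is an instance UUID taken from the log; the function body is a stand-in, not Nova's actual code):

    from oslo_concurrency import lockutils

    # Per-instance serialization: the lock name is the instance UUID.
    @lockutils.synchronized("6ca702af-1a5c-40bb-b6c7-2f55ca308c02")
    def _locked_do_build_and_run_instance():
        # Runs with the lock held; the "waited 0.000s" and
        # "held 323.947s" figures above are measured around this body.
        pass

    _locked_do_build_and_run_instance()

The long hold time logged above (323.947s) covers the entire failed build attempt, including the image copy that raised the InvalidArgument fault.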
[ 941.445478] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Expecting reply to msg 802239df7a2544ffbbede3380c132924 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 941.480322] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 802239df7a2544ffbbede3380c132924 [ 941.483054] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Expecting reply to msg 6ec39e2758f44b0893fc78482115c02b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 941.521025] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6ec39e2758f44b0893fc78482115c02b [ 941.551074] env[62740]: INFO nova.scheduler.client.report [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Deleted allocations for instance 6ec38a6c-f4b2-42ce-b371-5fe82d577545 [ 941.558027] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Expecting reply to msg e818eae335554186b63b489e04039b99 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 941.570703] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e818eae335554186b63b489e04039b99 [ 941.571335] env[62740]: DEBUG oslo_concurrency.lockutils [None req-eefd7416-a00e-4bfd-991f-0744933142bd tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Lock "6ec38a6c-f4b2-42ce-b371-5fe82d577545" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 323.947s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.571961] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg 7d4ed382c4cf4324a130a5ec2257ed7e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 941.572786] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9a0130e3-7864-4bc8-9559-c44a939b4a34 tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Lock "6ec38a6c-f4b2-42ce-b371-5fe82d577545" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 123.725s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.573022] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9a0130e3-7864-4bc8-9559-c44a939b4a34 tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Acquiring lock "6ec38a6c-f4b2-42ce-b371-5fe82d577545-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.573234] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9a0130e3-7864-4bc8-9559-c44a939b4a34 tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Lock 
"6ec38a6c-f4b2-42ce-b371-5fe82d577545-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.573455] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9a0130e3-7864-4bc8-9559-c44a939b4a34 tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Lock "6ec38a6c-f4b2-42ce-b371-5fe82d577545-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.575729] env[62740]: INFO nova.compute.manager [None req-9a0130e3-7864-4bc8-9559-c44a939b4a34 tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Terminating instance [ 941.577407] env[62740]: DEBUG nova.compute.manager [None req-9a0130e3-7864-4bc8-9559-c44a939b4a34 tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 941.577605] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-9a0130e3-7864-4bc8-9559-c44a939b4a34 tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 941.578083] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-00946193-f55e-4461-a97a-4ef2dc82ca3d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.586036] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7d4ed382c4cf4324a130a5ec2257ed7e [ 941.587569] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3687b28e-e541-4f88-a2e2-3325bf6a8bfb {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.597952] env[62740]: DEBUG nova.compute.manager [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 941.599601] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg 588f03406296410cb30bae835c45ae82 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 941.624994] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-9a0130e3-7864-4bc8-9559-c44a939b4a34 tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6ec38a6c-f4b2-42ce-b371-5fe82d577545 could not be found. 
[ 941.625220] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-9a0130e3-7864-4bc8-9559-c44a939b4a34 tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 941.625412] env[62740]: INFO nova.compute.manager [None req-9a0130e3-7864-4bc8-9559-c44a939b4a34 tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Took 0.05 seconds to destroy the instance on the hypervisor. [ 941.625693] env[62740]: DEBUG oslo.service.loopingcall [None req-9a0130e3-7864-4bc8-9559-c44a939b4a34 tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 941.625917] env[62740]: DEBUG nova.compute.manager [-] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 941.626020] env[62740]: DEBUG nova.network.neutron [-] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 941.633907] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 588f03406296410cb30bae835c45ae82 [ 941.647780] env[62740]: DEBUG oslo_concurrency.lockutils [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.648027] env[62740]: DEBUG oslo_concurrency.lockutils [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.649574] env[62740]: INFO nova.compute.claims [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 941.651214] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg 2b733e00ad5e4a379bd35277e4f1588c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 941.652683] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 6b48c358ff5445e499e0838c1987b630 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 941.660607] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6b48c358ff5445e499e0838c1987b630 [ 941.660919] env[62740]: DEBUG nova.network.neutron [-] [instance: 
6ec38a6c-f4b2-42ce-b371-5fe82d577545] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.661257] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 2dc6dbbe1c844789bef71745764dc9d5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 941.681087] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2dc6dbbe1c844789bef71745764dc9d5 [ 941.681350] env[62740]: INFO nova.compute.manager [-] [instance: 6ec38a6c-f4b2-42ce-b371-5fe82d577545] Took 0.06 seconds to deallocate network for instance. [ 941.685112] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9a0130e3-7864-4bc8-9559-c44a939b4a34 tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Expecting reply to msg 4446f7c29dcf4adc97915cd677aec0df in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 941.688758] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b733e00ad5e4a379bd35277e4f1588c [ 941.690271] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg c533e84063a345b08d31c6a08c85e272 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 941.697276] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c533e84063a345b08d31c6a08c85e272 [ 941.713408] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4446f7c29dcf4adc97915cd677aec0df [ 941.727224] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9a0130e3-7864-4bc8-9559-c44a939b4a34 tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Expecting reply to msg 44dfa1dbba6b407c992ca043a380f606 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 941.766436] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 44dfa1dbba6b407c992ca043a380f606 [ 941.770883] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9a0130e3-7864-4bc8-9559-c44a939b4a34 tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Lock "6ec38a6c-f4b2-42ce-b371-5fe82d577545" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.196s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.770883] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9a0130e3-7864-4bc8-9559-c44a939b4a34 tempest-ServersAdminTestJSON-1870870725 tempest-ServersAdminTestJSON-1870870725-project-member] Expecting reply to msg dc9b39604afa420daa3a720997a35f3b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 941.783609] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dc9b39604afa420daa3a720997a35f3b [ 942.003039] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c543de8d-b432-44d6-ace2-a1eb60affc58 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.010678] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fb30081-675b-4b29-8673-d93c57e65597 {{(pid=62740) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.038817] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36594fc8-aab5-479a-a1fb-93132decc4a7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.045493] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2521920-d5be-48f7-bdab-13d6dce951c9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.058814] env[62740]: DEBUG nova.compute.provider_tree [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 942.059309] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg 5242da88661044f0a2e0068a6c268b70 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 942.067483] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5242da88661044f0a2e0068a6c268b70 [ 942.068104] env[62740]: DEBUG nova.scheduler.client.report [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 942.070605] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg 402c6b0ccfa1490a9da61694fad465e5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 942.084748] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 402c6b0ccfa1490a9da61694fad465e5 [ 942.085476] env[62740]: DEBUG oslo_concurrency.lockutils [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.437s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.085933] env[62740]: DEBUG nova.compute.manager [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Start building networks asynchronously for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 942.087541] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg ffca0d2754bd4a58bac9f974abce963e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 942.126584] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ffca0d2754bd4a58bac9f974abce963e [ 942.127896] env[62740]: DEBUG nova.compute.utils [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 942.128619] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg 63d91e6ef75847979e285759f7bd1501 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 942.129364] env[62740]: DEBUG nova.compute.manager [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 942.129542] env[62740]: DEBUG nova.network.neutron [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 942.141048] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63d91e6ef75847979e285759f7bd1501 [ 942.141591] env[62740]: DEBUG nova.compute.manager [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 942.143193] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg d94ba088d1b14c93bd78100c04be9ad2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 942.173837] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d94ba088d1b14c93bd78100c04be9ad2 [ 942.176406] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg 96d1ddf596e4439ebca881b271dc2206 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 942.207802] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 96d1ddf596e4439ebca881b271dc2206 [ 942.209013] env[62740]: DEBUG nova.compute.manager [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Start spawning the instance on the hypervisor. {{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 942.236102] env[62740]: DEBUG nova.virt.hardware [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 942.236356] env[62740]: DEBUG nova.virt.hardware [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 942.236517] env[62740]: DEBUG nova.virt.hardware [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 942.236702] env[62740]: DEBUG nova.virt.hardware [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 942.236849] env[62740]: DEBUG nova.virt.hardware [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd 
tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 942.236998] env[62740]: DEBUG nova.virt.hardware [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 942.237265] env[62740]: DEBUG nova.virt.hardware [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 942.237434] env[62740]: DEBUG nova.virt.hardware [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 942.237639] env[62740]: DEBUG nova.virt.hardware [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 942.237758] env[62740]: DEBUG nova.virt.hardware [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 942.237927] env[62740]: DEBUG nova.virt.hardware [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 942.238824] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8a00010-b8c9-4ef1-90e7-952bdf924974 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.242588] env[62740]: DEBUG nova.policy [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '017aef872af749f3833b65f279808836', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8e3dba04fe444522a3b09a32eeb47140', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 942.249880] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f869c5a7-55a8-4674-9df4-c9ac5c4f28d4 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.333682] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5df9e5ad-fb4b-4156-bac8-94416c7fbbad tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Expecting reply to msg 6a0dfdcd760e495db6b3b38b5ed07555 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 942.344870] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6a0dfdcd760e495db6b3b38b5ed07555 [ 942.345383] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5df9e5ad-fb4b-4156-bac8-94416c7fbbad tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Acquiring lock "f22357ec-450c-4545-8822-74b83bfc5a35" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 942.792850] env[62740]: DEBUG nova.network.neutron [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Successfully created port: a518be5e-ee13-4ea2-b780-5ac0b6ceb0bf {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 943.777110] env[62740]: DEBUG nova.compute.manager [req-9e878e2f-6db5-4d29-a291-5237c53f9201 req-b0127020-75c4-420c-9dc2-5a92b20c698f service nova] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Received event network-vif-plugged-a518be5e-ee13-4ea2-b780-5ac0b6ceb0bf {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 943.777110] env[62740]: DEBUG oslo_concurrency.lockutils [req-9e878e2f-6db5-4d29-a291-5237c53f9201 req-b0127020-75c4-420c-9dc2-5a92b20c698f service nova] Acquiring lock "388a39df-9fa9-4153-9f3c-4ad94fd5edfb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.777110] env[62740]: DEBUG oslo_concurrency.lockutils [req-9e878e2f-6db5-4d29-a291-5237c53f9201 req-b0127020-75c4-420c-9dc2-5a92b20c698f service nova] Lock "388a39df-9fa9-4153-9f3c-4ad94fd5edfb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.777110] env[62740]: DEBUG oslo_concurrency.lockutils [req-9e878e2f-6db5-4d29-a291-5237c53f9201 req-b0127020-75c4-420c-9dc2-5a92b20c698f service nova] Lock "388a39df-9fa9-4153-9f3c-4ad94fd5edfb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.777239] env[62740]: DEBUG nova.compute.manager [req-9e878e2f-6db5-4d29-a291-5237c53f9201 req-b0127020-75c4-420c-9dc2-5a92b20c698f service nova] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] No waiting events found dispatching network-vif-plugged-a518be5e-ee13-4ea2-b780-5ac0b6ceb0bf {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 943.777239] env[62740]: WARNING nova.compute.manager [req-9e878e2f-6db5-4d29-a291-5237c53f9201 req-b0127020-75c4-420c-9dc2-5a92b20c698f service nova] 
[instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Received unexpected event network-vif-plugged-a518be5e-ee13-4ea2-b780-5ac0b6ceb0bf for instance with vm_state building and task_state spawning. [ 943.850720] env[62740]: DEBUG nova.network.neutron [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Successfully updated port: a518be5e-ee13-4ea2-b780-5ac0b6ceb0bf {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 943.850720] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg b8e5d6afc25f443190db7bc418ab206e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 943.863971] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b8e5d6afc25f443190db7bc418ab206e [ 943.863971] env[62740]: DEBUG oslo_concurrency.lockutils [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Acquiring lock "refresh_cache-388a39df-9fa9-4153-9f3c-4ad94fd5edfb" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 943.863971] env[62740]: DEBUG oslo_concurrency.lockutils [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Acquired lock "refresh_cache-388a39df-9fa9-4153-9f3c-4ad94fd5edfb" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.863971] env[62740]: DEBUG nova.network.neutron [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 943.863971] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg 66d4d33b2c37426a97525940d3d606fe in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 943.871827] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 66d4d33b2c37426a97525940d3d606fe [ 943.923958] env[62740]: DEBUG nova.network.neutron [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 944.185160] env[62740]: DEBUG nova.network.neutron [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Updating instance_info_cache with network_info: [{"id": "a518be5e-ee13-4ea2-b780-5ac0b6ceb0bf", "address": "fa:16:3e:5d:65:33", "network": {"id": "304597cd-4bd9-403c-8f5b-990e2a1efabc", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.91", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "47f0062f3bf04910bbbb3502a2f3ff28", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa518be5e-ee", "ovs_interfaceid": "a518be5e-ee13-4ea2-b780-5ac0b6ceb0bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.188317] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg 60e7477e12ce4cf69d45aae2388dbf2b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 944.205624] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 60e7477e12ce4cf69d45aae2388dbf2b [ 944.206502] env[62740]: DEBUG oslo_concurrency.lockutils [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Releasing lock "refresh_cache-388a39df-9fa9-4153-9f3c-4ad94fd5edfb" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 944.206923] env[62740]: DEBUG nova.compute.manager [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Instance network_info: |[{"id": "a518be5e-ee13-4ea2-b780-5ac0b6ceb0bf", "address": "fa:16:3e:5d:65:33", "network": {"id": "304597cd-4bd9-403c-8f5b-990e2a1efabc", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.91", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "47f0062f3bf04910bbbb3502a2f3ff28", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", 
"external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa518be5e-ee", "ovs_interfaceid": "a518be5e-ee13-4ea2-b780-5ac0b6ceb0bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 944.207227] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5d:65:33', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '39ab9baf-90cd-4fe2-8d56-434f8210fc19', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a518be5e-ee13-4ea2-b780-5ac0b6ceb0bf', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 944.215704] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Creating folder: Project (8e3dba04fe444522a3b09a32eeb47140). Parent ref: group-v156037. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 944.216339] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a5e95887-090a-44d2-887e-bb77ce7993eb {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.227817] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Created folder: Project (8e3dba04fe444522a3b09a32eeb47140) in parent group-v156037. [ 944.227935] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Creating folder: Instances. Parent ref: group-v156110. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 944.228941] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-996eadf4-525d-48f8-9ad6-5eca53ee34ba {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.237381] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Created folder: Instances in parent group-v156110. [ 944.237627] env[62740]: DEBUG oslo.service.loopingcall [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 944.237817] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 944.238040] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e10a1c5a-d62a-4ea3-bec4-8c7f3e7116fc {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.258922] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 944.258922] env[62740]: value = "task-640161" [ 944.258922] env[62740]: _type = "Task" [ 944.258922] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.267558] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640161, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.771350] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640161, 'name': CreateVM_Task, 'duration_secs': 0.290467} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.771784] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 944.772545] env[62740]: DEBUG oslo_concurrency.lockutils [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 944.773018] env[62740]: DEBUG oslo_concurrency.lockutils [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Acquired lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.773357] env[62740]: DEBUG oslo_concurrency.lockutils [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 944.773623] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b91bf27-da74-4014-96fc-175e50ab9de5 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.778788] env[62740]: DEBUG oslo_vmware.api [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Waiting for the task: (returnval){ [ 944.778788] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]523be9b6-216a-02bf-db71-47ddb6b227d1" [ 944.778788] env[62740]: _type = "Task" [ 944.778788] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.786300] env[62740]: DEBUG oslo_vmware.api [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]523be9b6-216a-02bf-db71-47ddb6b227d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.291917] env[62740]: DEBUG oslo_concurrency.lockutils [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Releasing lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 945.292318] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 945.292425] env[62740]: DEBUG oslo_concurrency.lockutils [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.806648] env[62740]: DEBUG nova.compute.manager [req-508d82d3-79bc-49ff-9c42-6dcc5d7a6ece req-f9282982-8164-4e7f-8758-cfdef1192104 service nova] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Received event network-changed-a518be5e-ee13-4ea2-b780-5ac0b6ceb0bf {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 945.806894] env[62740]: DEBUG nova.compute.manager [req-508d82d3-79bc-49ff-9c42-6dcc5d7a6ece req-f9282982-8164-4e7f-8758-cfdef1192104 service nova] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Refreshing instance network info cache due to event network-changed-a518be5e-ee13-4ea2-b780-5ac0b6ceb0bf. 
{{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 945.807073] env[62740]: DEBUG oslo_concurrency.lockutils [req-508d82d3-79bc-49ff-9c42-6dcc5d7a6ece req-f9282982-8164-4e7f-8758-cfdef1192104 service nova] Acquiring lock "refresh_cache-388a39df-9fa9-4153-9f3c-4ad94fd5edfb" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.807221] env[62740]: DEBUG oslo_concurrency.lockutils [req-508d82d3-79bc-49ff-9c42-6dcc5d7a6ece req-f9282982-8164-4e7f-8758-cfdef1192104 service nova] Acquired lock "refresh_cache-388a39df-9fa9-4153-9f3c-4ad94fd5edfb" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.807487] env[62740]: DEBUG nova.network.neutron [req-508d82d3-79bc-49ff-9c42-6dcc5d7a6ece req-f9282982-8164-4e7f-8758-cfdef1192104 service nova] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Refreshing network info cache for port a518be5e-ee13-4ea2-b780-5ac0b6ceb0bf {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 945.807862] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-508d82d3-79bc-49ff-9c42-6dcc5d7a6ece req-f9282982-8164-4e7f-8758-cfdef1192104 service nova] Expecting reply to msg 22c5145f1fe249bfac86f112edaffdf5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 945.815307] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 22c5145f1fe249bfac86f112edaffdf5 [ 945.889842] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 945.890131] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Starting heal instance info cache {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 945.890294] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Rebuilding the list of instances to heal {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 945.890939] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg a2fe26d8aa5540179db9f9a79a7c3d79 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 945.916694] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a2fe26d8aa5540179db9f9a79a7c3d79 [ 945.917018] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 945.917889] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 945.918060] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Skipping network cache update for instance because it is Building. 
{{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 945.918639] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 945.918639] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 945.918639] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 945.918639] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 945.918774] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 945.918874] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 945.919387] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 945.919387] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Didn't find any instances for network info cache update. 
{{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 945.920058] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 945.920264] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 945.920429] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager.update_available_resource {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 945.920863] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 4559178eb63f410fb8e1def6580bc6aa in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 945.931092] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4559178eb63f410fb8e1def6580bc6aa [ 945.932221] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 945.932221] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 945.932393] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.932546] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62740) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 945.933633] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3b0c66d-74d6-4fa2-af3d-1b36fb3f71c7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.945045] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19366306-9bde-4d70-91cf-24ce535e7ef5 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.960190] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae71c438-0ec2-4585-a2d1-753a2ef7e33f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.967536] env[62740]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ed34f2d-17be-472d-ac8b-2f1abfe85d18 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.002768] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181627MB free_disk=90GB free_vcpus=48 pci_devices=None {{(pid=62740) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 946.002931] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 946.003159] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 946.004285] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg c1a58e1e13a3494a8ea3415c667703b2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 946.045019] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c1a58e1e13a3494a8ea3415c667703b2 [ 946.048356] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 7edcc6463e7c47d9b55f263c10234577 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 946.059469] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7edcc6463e7c47d9b55f263c10234577 [ 946.085384] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 75050b95-60c6-4e44-a1d5-0d47492dd739 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 946.085751] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 8053d2ae-ca61-4282-aa89-83f3a2e107bc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 946.086157] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance a24df1e4-2865-4ab3-beae-0892dca12bef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 946.086157] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 26712c18-d9f4-4d7d-80fb-4d527da9c1e3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 946.086259] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 68aa9321-22ce-45a0-8323-fa8564dca46b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 946.086323] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 00085df9-ce61-4ccc-8ecf-16956109eb8f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 946.086443] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 697e3884-2ef4-423e-af81-e5d1e94f65a2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 946.086561] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance f22357ec-450c-4545-8822-74b83bfc5a35 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 946.086679] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 732da1c8-e83e-4dd7-96c2-dbfa9468baab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 946.086797] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 388a39df-9fa9-4153-9f3c-4ad94fd5edfb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 946.087915] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg b4ff251265f94441ae102dee31e2b49c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 946.104030] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4ff251265f94441ae102dee31e2b49c [ 946.104326] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 5f57389d-853e-4439-872a-8345664578d0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 946.104844] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 2f3e87cc27d9403985f1be7be9b80155 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 946.118551] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2f3e87cc27d9403985f1be7be9b80155 [ 946.119311] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 472cd209-4192-4473-b788-d1ea342653bf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 946.119838] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg fa1b8e1d5f264cc88b7c079d16bd6313 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 946.130624] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fa1b8e1d5f264cc88b7c079d16bd6313 [ 946.131405] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance d8dac9af-0897-4fbf-8ee6-1fb3955d48c0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 946.131848] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg b50bf14ceba840caba95a9d52279b1a4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 946.143159] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b50bf14ceba840caba95a9d52279b1a4 [ 946.143846] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 2162ea32-6407-4286-9340-b62a9ec0988e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 946.144464] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg a9d35e14161e4c4ab0a89118cdd6fb7f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 946.155699] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a9d35e14161e4c4ab0a89118cdd6fb7f [ 946.156424] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance d2669ffb-41b1-474d-bb7a-fabae11e69d0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 946.156917] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 0ed8c327d44e4226bec86390c9f9627c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 946.167943] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0ed8c327d44e4226bec86390c9f9627c [ 946.168677] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance f7574228-f7fc-4ab0-9a38-7671046d46a9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 946.169290] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 2091b4608906413b85a862dfeb4c0224 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 946.181430] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2091b4608906413b85a862dfeb4c0224 [ 946.182151] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance b1b86050-2bb1-443b-967b-12531d71ba04 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 946.182634] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 17dba47c999b43458933b66e69a4f74a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 946.194213] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 17dba47c999b43458933b66e69a4f74a [ 946.194658] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 285dcc26-a4e9-40bc-82dd-37931f46e7fe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 946.195268] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 3ccee95132ee4522961e43f32c288a76 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 946.210084] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ccee95132ee4522961e43f32c288a76 [ 946.210084] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance b0995d6c-a700-47a3-a39d-6a6fe1462042 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 946.210084] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 859e41780468460981c34dfc522d5939 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 946.224600] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 859e41780468460981c34dfc522d5939 [ 946.225418] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance d6c3ca16-5c7c-41e6-9850-10221603ad2a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 946.225985] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 94d4b4e6026a4c89b54636ca1a6cca6a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 946.244753] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 94d4b4e6026a4c89b54636ca1a6cca6a [ 946.245712] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance d60c8e65-1eb3-4017-b28e-8b72b0b4b2e1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 946.246216] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg b6b5084ecc0d4d11afddd6229f5783a5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 946.262049] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b6b5084ecc0d4d11afddd6229f5783a5 [ 946.262049] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 6531eee8-d8ec-4a9d-911c-d7d9b88baf19 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 946.262049] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 8cbd062960f140d59334d65f325e8ba1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 946.273098] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8cbd062960f140d59334d65f325e8ba1 [ 946.273907] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 158406db-7196-4826-aefa-20a58daa186b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 946.274691] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg b344c7285b4f4afebdc9286dd4f4f76a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 946.286631] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b344c7285b4f4afebdc9286dd4f4f76a [ 946.287447] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance cf00af51-2b31-4b99-a692-8b0851dd74b8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 946.287963] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 916d86c59d7d49a19cf476f38e8b8b8d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 946.301721] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 916d86c59d7d49a19cf476f38e8b8b8d [ 946.302701] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 6ca702af-1a5c-40bb-b6c7-2f55ca308c02 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 946.302701] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 946.302859] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 946.555410] env[62740]: DEBUG nova.network.neutron [req-508d82d3-79bc-49ff-9c42-6dcc5d7a6ece req-f9282982-8164-4e7f-8758-cfdef1192104 service nova] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Updated VIF entry in instance network info cache for port a518be5e-ee13-4ea2-b780-5ac0b6ceb0bf. 
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 946.555788] env[62740]: DEBUG nova.network.neutron [req-508d82d3-79bc-49ff-9c42-6dcc5d7a6ece req-f9282982-8164-4e7f-8758-cfdef1192104 service nova] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Updating instance_info_cache with network_info: [{"id": "a518be5e-ee13-4ea2-b780-5ac0b6ceb0bf", "address": "fa:16:3e:5d:65:33", "network": {"id": "304597cd-4bd9-403c-8f5b-990e2a1efabc", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.91", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "47f0062f3bf04910bbbb3502a2f3ff28", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa518be5e-ee", "ovs_interfaceid": "a518be5e-ee13-4ea2-b780-5ac0b6ceb0bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.556358] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-508d82d3-79bc-49ff-9c42-6dcc5d7a6ece req-f9282982-8164-4e7f-8758-cfdef1192104 service nova] Expecting reply to msg 84edf46924f5406091a22a256d42821b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 946.567610] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 84edf46924f5406091a22a256d42821b [ 946.568358] env[62740]: DEBUG oslo_concurrency.lockutils [req-508d82d3-79bc-49ff-9c42-6dcc5d7a6ece req-f9282982-8164-4e7f-8758-cfdef1192104 service nova] Releasing lock "refresh_cache-388a39df-9fa9-4153-9f3c-4ad94fd5edfb" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 946.660092] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4973a65-6d75-4fe3-bc3c-0e16a9c6a1dd {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.667665] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f7b7fda-8ef7-45b9-a37b-ffd28a95c5af {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.698654] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45daab18-222d-49a2-b172-f4b9d7892f88 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.708578] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-249c4757-9335-4078-9bec-e26c901938ca {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.719304] env[62740]: DEBUG nova.compute.provider_tree [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed in ProviderTree for 
provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 946.719831] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 2ee06c71503d45418bf29387902ee5bd in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 946.727069] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ee06c71503d45418bf29387902ee5bd [ 946.727958] env[62740]: DEBUG nova.scheduler.client.report [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 946.730464] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 38fa410b2e2d4c11abf2a8096c45abe6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 946.745678] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 38fa410b2e2d4c11abf2a8096c45abe6 [ 946.746408] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62740) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 946.746652] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.743s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.717682] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 947.717916] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 947.718034] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62740) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 947.886037] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 947.890476] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 948.891136] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 954.542354] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Acquiring lock "e21a5624-20ca-45d8-a0bf-dd87cec1c701" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 954.542354] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Lock "e21a5624-20ca-45d8-a0bf-dd87cec1c701" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.389502] env[62740]: WARNING oslo_vmware.rw_handles [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 976.389502] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 976.389502] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 976.389502] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 976.389502] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 976.389502] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 976.389502] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 976.389502] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 976.389502] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 976.389502] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 976.389502] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 976.389502] env[62740]: ERROR oslo_vmware.rw_handles [ 976.390146] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 
tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/6485efd3-d5b6-4f25-8997-450bdea6fa1e/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 976.392242] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 976.392418] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Copying Virtual Disk [datastore2] vmware_temp/6485efd3-d5b6-4f25-8997-450bdea6fa1e/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore2] vmware_temp/6485efd3-d5b6-4f25-8997-450bdea6fa1e/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 976.392695] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b4658bc1-8616-43e7-a6a9-bb9047a5f33d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.400431] env[62740]: DEBUG oslo_vmware.api [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Waiting for the task: (returnval){ [ 976.400431] env[62740]: value = "task-640162" [ 976.400431] env[62740]: _type = "Task" [ 976.400431] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.408248] env[62740]: DEBUG oslo_vmware.api [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Task: {'id': task-640162, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.914371] env[62740]: DEBUG oslo_vmware.exceptions [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Fault InvalidArgument not matched. 
{{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 976.914371] env[62740]: DEBUG oslo_concurrency.lockutils [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 976.915025] env[62740]: ERROR nova.compute.manager [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 976.915025] env[62740]: Faults: ['InvalidArgument'] [ 976.915025] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Traceback (most recent call last): [ 976.915025] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 976.915025] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] yield resources [ 976.915025] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 976.915025] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] self.driver.spawn(context, instance, image_meta, [ 976.915025] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 976.915025] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] self._vmops.spawn(context, instance, image_meta, injected_files, [ 976.915025] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 976.915025] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] self._fetch_image_if_missing(context, vi) [ 976.915025] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 976.915343] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] image_cache(vi, tmp_image_ds_loc) [ 976.915343] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 976.915343] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] vm_util.copy_virtual_disk( [ 976.915343] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 976.915343] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] session._wait_for_task(vmdk_copy_task) [ 976.915343] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 976.915343] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] return self.wait_for_task(task_ref) [ 976.915343] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 976.915343] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] return evt.wait() [ 976.915343] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 976.915343] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] result = hub.switch() [ 976.915343] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 976.915343] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] return self.greenlet.switch() [ 976.915697] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 976.915697] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] self.f(*self.args, **self.kw) [ 976.915697] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 976.915697] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] raise exceptions.translate_fault(task_info.error) [ 976.915697] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 976.915697] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Faults: ['InvalidArgument'] [ 976.915697] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] [ 976.915697] env[62740]: INFO nova.compute.manager [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Terminating instance [ 976.917281] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.917541] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 976.918180] env[62740]: DEBUG nova.compute.manager [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 
tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 976.918440] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 976.918674] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8aed2c77-1bfa-4ffe-8dac-80c43a7ff5ec {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.920998] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71bd85ed-d694-469e-a8b4-61245dd51ccd {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.928134] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 976.929195] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-25745d70-21b3-4881-af20-f516a0605f30 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.930714] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 976.930963] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 976.931629] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90c40f2b-df82-43f4-bc4a-188945ec786f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.936409] env[62740]: DEBUG oslo_vmware.api [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Waiting for the task: (returnval){ [ 976.936409] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52ac1ed5-496b-05c1-f2ee-b73240711a59" [ 976.936409] env[62740]: _type = "Task" [ 976.936409] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.943685] env[62740]: DEBUG oslo_vmware.api [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52ac1ed5-496b-05c1-f2ee-b73240711a59, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.003577] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 977.003755] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 977.003941] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Deleting the datastore file [datastore2] 75050b95-60c6-4e44-a1d5-0d47492dd739 {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 977.004223] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ef05c4c9-4aac-4857-bb2d-6a32b36fc4c9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.010270] env[62740]: DEBUG oslo_vmware.api [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Waiting for the task: (returnval){ [ 977.010270] env[62740]: value = "task-640164" [ 977.010270] env[62740]: _type = "Task" [ 977.010270] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.017653] env[62740]: DEBUG oslo_vmware.api [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Task: {'id': task-640164, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.446742] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 977.447017] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Creating directory with path [datastore2] vmware_temp/b12afd9e-db23-4a8f-a259-98e7e7fa2cf1/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 977.447265] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-25671619-d536-463d-a3d7-39180e723ce2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.459341] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Created directory with path [datastore2] vmware_temp/b12afd9e-db23-4a8f-a259-98e7e7fa2cf1/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 977.459549] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Fetch image to [datastore2] vmware_temp/b12afd9e-db23-4a8f-a259-98e7e7fa2cf1/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 977.459724] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/b12afd9e-db23-4a8f-a259-98e7e7fa2cf1/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 977.460482] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8787dcd0-41ba-4d84-aac5-67fe76c8adb2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.466936] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af37af5-d231-4abb-ba96-d7fd0442cf51 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.476027] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0690f54a-ce47-4190-8d9b-49081f6e9b02 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.505903] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e882a38c-3d9a-4a52-b03d-da345147bc39 {{(pid=62740) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.514341] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6779e691-bdc3-4c2f-b6e1-21855788502a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.520920] env[62740]: DEBUG oslo_vmware.api [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Task: {'id': task-640164, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.068225} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.521179] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 977.521373] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 977.521586] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 977.521783] env[62740]: INFO nova.compute.manager [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Took 0.60 seconds to destroy the instance on the hypervisor. 
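The records above trace oslo.vmware's task-polling pattern: each vCenter operation (CopyVirtualDisk_Task, DeleteDatastoreFile_Task) returns a task reference that the session polls until vCenter reports success or a fault. A minimal sketch of that pattern, assuming a reachable vCenter; the host, credentials and datastore paths below are placeholders, not values taken from this log:

from oslo_vmware import api, exceptions

# Placeholder endpoint and credentials (illustrative only).
session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)

# CopyVirtualDisk_Task is invoked against the VirtualDiskManager,
# matching the "Invoking VirtualDiskManager.CopyVirtualDisk_Task"
# record above.
disk_mgr = session.vim.service_content.virtualDiskManager
task = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task', disk_mgr,
    sourceName='[datastore2] vmware_temp/example/tmp-sparse.vmdk',
    destName='[datastore2] vmware_temp/example/example.vmdk')

try:
    # wait_for_task() re-polls the task (the "progress is 0%" lines)
    # and raises a translated fault on failure; that is how the
    # VimFaultException with Faults: ['InvalidArgument'] reaches
    # nova's spawn path in the traceback above.
    session.wait_for_task(task)
except exceptions.VimFaultException as e:
    print(e.fault_list)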
[ 977.523895] env[62740]: DEBUG nova.compute.claims [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 977.524076] env[62740]: DEBUG oslo_concurrency.lockutils [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 977.524292] env[62740]: DEBUG oslo_concurrency.lockutils [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 977.526255] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Expecting reply to msg 5f21a4fa944340e1a1db64e36da1e0bb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 977.537216] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 977.567661] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5f21a4fa944340e1a1db64e36da1e0bb [ 977.598287] env[62740]: DEBUG oslo_vmware.rw_handles [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b12afd9e-db23-4a8f-a259-98e7e7fa2cf1/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 977.664941] env[62740]: DEBUG oslo_vmware.rw_handles [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Completed reading data from the image iterator. {{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 977.665212] env[62740]: DEBUG oslo_vmware.rw_handles [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b12afd9e-db23-4a8f-a259-98e7e7fa2cf1/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 977.922566] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-386fd358-f8ab-4a80-b842-f36f7093d39d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.929466] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e041fd3-4845-49f2-b7d4-0da2b55d43e4 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.957749] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-394e9f61-9bc5-4971-8aa3-95b572b605c6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.964646] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fcc06fe-3858-4c53-b5f3-f1f631591b58 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.977334] env[62740]: DEBUG nova.compute.provider_tree [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 977.977838] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Expecting reply to msg 67ad8d12bf214483a68f1e3021d79fe4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 977.987263] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 67ad8d12bf214483a68f1e3021d79fe4 [ 977.988198] env[62740]: DEBUG nova.scheduler.client.report [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 977.990491] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Expecting reply to msg b9cb16b60aec4f5a8e645b177060dca9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 978.003788] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b9cb16b60aec4f5a8e645b177060dca9 [ 978.004794] env[62740]: DEBUG oslo_concurrency.lockutils [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.480s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.005382] env[62740]: ERROR nova.compute.manager [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 978.005382] env[62740]: Faults: ['InvalidArgument'] [ 978.005382] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Traceback (most recent call last): [ 978.005382] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 978.005382] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] self.driver.spawn(context, instance, image_meta, [ 978.005382] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 978.005382] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] self._vmops.spawn(context, instance, image_meta, injected_files, [ 978.005382] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 978.005382] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] self._fetch_image_if_missing(context, vi) [ 978.005382] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 978.005382] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] image_cache(vi, tmp_image_ds_loc) [ 978.005382] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 978.005748] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] vm_util.copy_virtual_disk( [ 978.005748] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 978.005748] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] session._wait_for_task(vmdk_copy_task) [ 978.005748] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 978.005748] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] return self.wait_for_task(task_ref) [ 978.005748] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 978.005748] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] return evt.wait() [ 978.005748] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 978.005748] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] result = hub.switch() [ 978.005748] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 978.005748] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] return self.greenlet.switch() [ 978.005748] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 978.005748] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] self.f(*self.args, **self.kw) [ 978.006209] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 978.006209] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] raise exceptions.translate_fault(task_info.error) [ 978.006209] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 978.006209] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Faults: ['InvalidArgument'] [ 978.006209] env[62740]: ERROR nova.compute.manager [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] [ 978.006209] env[62740]: DEBUG nova.compute.utils [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 978.007648] env[62740]: DEBUG nova.compute.manager [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Build of instance 75050b95-60c6-4e44-a1d5-0d47492dd739 was re-scheduled: A specified parameter was not correct: fileType [ 978.007648] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 978.008048] env[62740]: DEBUG nova.compute.manager [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 978.008230] env[62740]: DEBUG nova.compute.manager [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 978.008416] env[62740]: DEBUG nova.compute.manager [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 978.008589] env[62740]: DEBUG nova.network.neutron [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 978.353566] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Expecting reply to msg 087e4d3033614e2c95bb2eed483fda40 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 978.371165] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 087e4d3033614e2c95bb2eed483fda40 [ 978.371698] env[62740]: DEBUG nova.network.neutron [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 978.372191] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Expecting reply to msg 440725e97269449a8ac5be1a97916a83 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 978.388149] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 440725e97269449a8ac5be1a97916a83 [ 978.388899] env[62740]: INFO nova.compute.manager [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Took 0.38 seconds to deallocate network for instance. 
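The 'Acquiring lock "..." by "..."', '... acquired ... waited N s' and '... released ... held N s' DEBUG lines throughout this log come from oslo.concurrency's synchronized decorator; nova serializes build and terminate on a per-instance lock named after the instance UUID. A minimal sketch of the pattern, with an illustrative lock name and an elided body:

from oslo_concurrency import lockutils

# The decorator's inner() wrapper emits the acquire/release DEBUG
# lines, including how long the caller waited for the lock and how
# long the lock was held.
@lockutils.synchronized('75050b95-60c6-4e44-a1d5-0d47492dd739')
def _locked_do_build_and_run_instance():
    pass  # in nova this wraps _do_build_and_run_instance

_locked_do_build_and_run_instance()

This serialization is why, in the records below, the terminate request waits 177.111s on the instance lock: it queues behind the failed build, which releases the lock only after holding it for 377.511s.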
[ 978.390675] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Expecting reply to msg 61119278310b4fb09543946136b539ad in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 978.429440] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 61119278310b4fb09543946136b539ad [ 978.432142] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Expecting reply to msg 583c118b430942fa96d549057156a0dc in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 978.463583] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 583c118b430942fa96d549057156a0dc [ 978.489713] env[62740]: INFO nova.scheduler.client.report [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Deleted allocations for instance 75050b95-60c6-4e44-a1d5-0d47492dd739 [ 978.496769] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Expecting reply to msg 1f891aaa4f954fa6b8cee506c095214c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 978.520048] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1f891aaa4f954fa6b8cee506c095214c [ 978.520048] env[62740]: DEBUG oslo_concurrency.lockutils [None req-031f14a9-3a65-4b87-93fe-1480fa6d6bf5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Lock "75050b95-60c6-4e44-a1d5-0d47492dd739" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 377.511s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.520417] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg cd5a07ae588e423bb1cd65e86e7a4f11 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 978.521255] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c011d844-f52d-43d5-ba7e-2529012b7df5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Lock "75050b95-60c6-4e44-a1d5-0d47492dd739" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 177.111s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 978.521412] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c011d844-f52d-43d5-ba7e-2529012b7df5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Acquiring lock "75050b95-60c6-4e44-a1d5-0d47492dd739-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 978.521614] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c011d844-f52d-43d5-ba7e-2529012b7df5 
tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Lock "75050b95-60c6-4e44-a1d5-0d47492dd739-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 978.522165] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c011d844-f52d-43d5-ba7e-2529012b7df5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Lock "75050b95-60c6-4e44-a1d5-0d47492dd739-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.528350] env[62740]: INFO nova.compute.manager [None req-c011d844-f52d-43d5-ba7e-2529012b7df5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Terminating instance [ 978.530186] env[62740]: DEBUG nova.compute.manager [None req-c011d844-f52d-43d5-ba7e-2529012b7df5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 978.530640] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-c011d844-f52d-43d5-ba7e-2529012b7df5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 978.531042] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bbb20f59-a6f9-4204-98fa-2bf3eba05372 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.542124] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5a5fb8d-160c-4cad-a50d-f1fc166886ee {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.561955] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd5a07ae588e423bb1cd65e86e7a4f11 [ 978.562781] env[62740]: DEBUG nova.compute.manager [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Starting instance...
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 978.565437] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 94ec53f10e7f4a77bec5476f9f1bc208 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 978.584888] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-c011d844-f52d-43d5-ba7e-2529012b7df5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 75050b95-60c6-4e44-a1d5-0d47492dd739 could not be found. [ 978.584888] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-c011d844-f52d-43d5-ba7e-2529012b7df5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 978.584888] env[62740]: INFO nova.compute.manager [None req-c011d844-f52d-43d5-ba7e-2529012b7df5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Took 0.05 seconds to destroy the instance on the hypervisor. [ 978.585057] env[62740]: DEBUG oslo.service.loopingcall [None req-c011d844-f52d-43d5-ba7e-2529012b7df5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 978.585245] env[62740]: DEBUG nova.compute.manager [-] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 978.585345] env[62740]: DEBUG nova.network.neutron [-] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 978.604029] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 94ec53f10e7f4a77bec5476f9f1bc208 [ 978.616230] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 687733ecdb0641429dd4f32b80eb7d5f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 978.621501] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 978.621744] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 978.623200] env[62740]: INFO nova.compute.claims [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 978.624783] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg ff877c384f1b4704b7d81a799a310cbc in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 978.626387] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 687733ecdb0641429dd4f32b80eb7d5f [ 978.626766] env[62740]: DEBUG nova.network.neutron [-] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 978.627104] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e89602732b554f71ba0ce34d921facca in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 978.634857] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e89602732b554f71ba0ce34d921facca [ 978.635474] env[62740]: INFO nova.compute.manager [-] [instance: 75050b95-60c6-4e44-a1d5-0d47492dd739] Took 0.05 seconds to deallocate network for instance. 
[ 978.639344] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c011d844-f52d-43d5-ba7e-2529012b7df5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Expecting reply to msg 6bca48e448394f149962540d36bde3d5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 978.675042] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ff877c384f1b4704b7d81a799a310cbc [ 978.676845] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg e2101745eb8b4a6b9837ba8df2f896db in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 978.685621] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6bca48e448394f149962540d36bde3d5 [ 978.689339] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2101745eb8b4a6b9837ba8df2f896db [ 978.699175] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c011d844-f52d-43d5-ba7e-2529012b7df5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Expecting reply to msg df1574d7f24e4ffe8d78d32a87128190 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 978.738165] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg df1574d7f24e4ffe8d78d32a87128190 [ 978.744104] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c011d844-f52d-43d5-ba7e-2529012b7df5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Lock "75050b95-60c6-4e44-a1d5-0d47492dd739" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.222s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.744104] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c011d844-f52d-43d5-ba7e-2529012b7df5 tempest-ServerDiagnosticsNegativeTest-2017090350 tempest-ServerDiagnosticsNegativeTest-2017090350-project-member] Expecting reply to msg 6f6ebc3d3d0042cebaca70f8f66ef55e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 978.758953] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6f6ebc3d3d0042cebaca70f8f66ef55e [ 978.996044] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7de4911f-7e0b-4459-ab4e-f78aec9f78ed {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.004030] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50f701e0-f65d-401b-b179-dfac230d2aa9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.034424] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5fae6ff-81bd-4a9d-b9f8-46055bac6317 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.041998] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81cee0b9-78da-401b-bb36-90859ece9c73 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.055446] env[62740]:
DEBUG nova.compute.provider_tree [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 979.056686] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 7bc2e07013a043d89435eb58b6144eed in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 979.065009] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7bc2e07013a043d89435eb58b6144eed [ 979.065921] env[62740]: DEBUG nova.scheduler.client.report [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 979.068397] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg c7951f7fa2494cd294e04dfae45d970d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 979.082302] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c7951f7fa2494cd294e04dfae45d970d [ 979.083013] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.461s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 979.083485] env[62740]: DEBUG nova.compute.manager [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Start building networks asynchronously for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 979.085189] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 52a76a9711324e6ab2d33d3b33348a2f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 979.113471] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 52a76a9711324e6ab2d33d3b33348a2f [ 979.115301] env[62740]: DEBUG nova.compute.utils [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 979.115937] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 65281a5f1c8d449b964366ac976a432f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 979.117044] env[62740]: DEBUG nova.compute.manager [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 979.117222] env[62740]: DEBUG nova.network.neutron [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 979.128126] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 65281a5f1c8d449b964366ac976a432f [ 979.128753] env[62740]: DEBUG nova.compute.manager [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 979.130396] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg a9e24e868c0947ffb9f57b102595ce98 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 979.165344] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a9e24e868c0947ffb9f57b102595ce98 [ 979.168361] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 376700760b274a27b3741aac3ef0f975 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 979.195671] env[62740]: DEBUG nova.policy [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '070a90ba779f4bc59053f8bffc95de94', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1f735ac36a0d46269560f1209706fb69', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 979.207653] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 376700760b274a27b3741aac3ef0f975 [ 979.208931] env[62740]: DEBUG nova.compute.manager [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Start spawning the instance on the hypervisor. 
{{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 979.243494] env[62740]: DEBUG nova.virt.hardware [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 979.244154] env[62740]: DEBUG nova.virt.hardware [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 979.244154] env[62740]: DEBUG nova.virt.hardware [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 979.244154] env[62740]: DEBUG nova.virt.hardware [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 979.244292] env[62740]: DEBUG nova.virt.hardware [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 979.244677] env[62740]: DEBUG nova.virt.hardware [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 979.244804] env[62740]: DEBUG nova.virt.hardware [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 979.245159] env[62740]: DEBUG nova.virt.hardware [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 979.245619] env[62740]: DEBUG nova.virt.hardware [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Got 1 possible 
topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 979.245619] env[62740]: DEBUG nova.virt.hardware [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 979.245717] env[62740]: DEBUG nova.virt.hardware [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 979.247203] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83671d79-8935-4366-9985-e65a2e7bdd87 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.257918] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38836a1d-6b74-4fb5-b3ee-e1fca5d35a3c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.627658] env[62740]: DEBUG nova.network.neutron [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Successfully created port: b044227a-1c18-44a3-8e27-70c39a8f7efa {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 980.521843] env[62740]: DEBUG nova.compute.manager [req-a2ea8547-4edf-45e0-b202-4f4198f0c374 req-793e41ad-ae4e-44af-967e-fcabc3f6ee9b service nova] [instance: 5f57389d-853e-4439-872a-8345664578d0] Received event network-vif-plugged-b044227a-1c18-44a3-8e27-70c39a8f7efa {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 980.521843] env[62740]: DEBUG oslo_concurrency.lockutils [req-a2ea8547-4edf-45e0-b202-4f4198f0c374 req-793e41ad-ae4e-44af-967e-fcabc3f6ee9b service nova] Acquiring lock "5f57389d-853e-4439-872a-8345664578d0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.523185] env[62740]: DEBUG oslo_concurrency.lockutils [req-a2ea8547-4edf-45e0-b202-4f4198f0c374 req-793e41ad-ae4e-44af-967e-fcabc3f6ee9b service nova] Lock "5f57389d-853e-4439-872a-8345664578d0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 980.523185] env[62740]: DEBUG oslo_concurrency.lockutils [req-a2ea8547-4edf-45e0-b202-4f4198f0c374 req-793e41ad-ae4e-44af-967e-fcabc3f6ee9b service nova] Lock "5f57389d-853e-4439-872a-8345664578d0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 980.523185] env[62740]: DEBUG nova.compute.manager [req-a2ea8547-4edf-45e0-b202-4f4198f0c374 req-793e41ad-ae4e-44af-967e-fcabc3f6ee9b service nova] [instance: 5f57389d-853e-4439-872a-8345664578d0] No waiting events found dispatching
network-vif-plugged-b044227a-1c18-44a3-8e27-70c39a8f7efa {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 980.523185] env[62740]: WARNING nova.compute.manager [req-a2ea8547-4edf-45e0-b202-4f4198f0c374 req-793e41ad-ae4e-44af-967e-fcabc3f6ee9b service nova] [instance: 5f57389d-853e-4439-872a-8345664578d0] Received unexpected event network-vif-plugged-b044227a-1c18-44a3-8e27-70c39a8f7efa for instance with vm_state building and task_state spawning. [ 980.568153] env[62740]: DEBUG nova.network.neutron [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Successfully updated port: b044227a-1c18-44a3-8e27-70c39a8f7efa {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 980.568766] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 1b9d5ff2c89344409986cc94ea0c987f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 980.587065] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1b9d5ff2c89344409986cc94ea0c987f [ 980.587698] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Acquiring lock "refresh_cache-5f57389d-853e-4439-872a-8345664578d0" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 980.590272] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Acquired lock "refresh_cache-5f57389d-853e-4439-872a-8345664578d0" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.590409] env[62740]: DEBUG nova.network.neutron [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 980.590872] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg f933bb143d634efda64e30934d95afbe in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 980.598396] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f933bb143d634efda64e30934d95afbe [ 980.633479] env[62740]: DEBUG nova.network.neutron [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 981.122048] env[62740]: DEBUG nova.network.neutron [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Updating instance_info_cache with network_info: [{"id": "b044227a-1c18-44a3-8e27-70c39a8f7efa", "address": "fa:16:3e:27:c3:87", "network": {"id": "f10f5770-f866-413b-86ce-20c3a1473482", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1756365504-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f735ac36a0d46269560f1209706fb69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb044227a-1c", "ovs_interfaceid": "b044227a-1c18-44a3-8e27-70c39a8f7efa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 981.122609] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 3642d78b5f354c9dbbaad0c1c1b0cf95 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 981.138177] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3642d78b5f354c9dbbaad0c1c1b0cf95 [ 981.138893] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Releasing lock "refresh_cache-5f57389d-853e-4439-872a-8345664578d0" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 981.139191] env[62740]: DEBUG nova.compute.manager [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Instance network_info: |[{"id": "b044227a-1c18-44a3-8e27-70c39a8f7efa", "address": "fa:16:3e:27:c3:87", "network": {"id": "f10f5770-f866-413b-86ce-20c3a1473482", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1756365504-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f735ac36a0d46269560f1209706fb69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", 
"segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb044227a-1c", "ovs_interfaceid": "b044227a-1c18-44a3-8e27-70c39a8f7efa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 981.139628] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:27:c3:87', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f0ef5aba-bd9a-42ff-a1a0-5e763986d70a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b044227a-1c18-44a3-8e27-70c39a8f7efa', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 981.147620] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Creating folder: Project (1f735ac36a0d46269560f1209706fb69). Parent ref: group-v156037. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 981.148659] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9fb7d387-5f27-4672-99be-6a6524d590f5 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.160415] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Created folder: Project (1f735ac36a0d46269560f1209706fb69) in parent group-v156037. [ 981.160591] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Creating folder: Instances. Parent ref: group-v156113. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 981.160810] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6b494f17-795e-46bf-83bc-0da48e386385 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.169668] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Created folder: Instances in parent group-v156113. [ 981.169889] env[62740]: DEBUG oslo.service.loopingcall [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 981.170089] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5f57389d-853e-4439-872a-8345664578d0] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 981.170294] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e47b8cf2-caaf-45ef-944c-35080e08a68e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.194610] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 981.194610] env[62740]: value = "task-640167" [ 981.194610] env[62740]: _type = "Task" [ 981.194610] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.202082] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640167, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.705486] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640167, 'name': CreateVM_Task, 'duration_secs': 0.289399} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.705757] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5f57389d-853e-4439-872a-8345664578d0] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 981.706515] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 981.706678] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Acquired lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.706991] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 981.707251] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c054c403-7667-4de7-b2ae-b62ee2fc177e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.711793] env[62740]: DEBUG oslo_vmware.api [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Waiting for the task: (returnval){ [ 981.711793] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5214f3e0-240b-b7d5-235c-8202b7c1be64" [ 981.711793] env[62740]: _type = "Task" [ 981.711793] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.720827] env[62740]: DEBUG oslo_vmware.api [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5214f3e0-240b-b7d5-235c-8202b7c1be64, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.222266] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Releasing lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 982.222529] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 982.222742] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 982.737613] env[62740]: DEBUG nova.compute.manager [req-78cb9888-337a-4abc-ac67-2da4c4f39589 req-8fd41988-1025-48dd-8f93-7a7726d7838d service nova] [instance: 5f57389d-853e-4439-872a-8345664578d0] Received event network-changed-b044227a-1c18-44a3-8e27-70c39a8f7efa {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 982.739686] env[62740]: DEBUG nova.compute.manager [req-78cb9888-337a-4abc-ac67-2da4c4f39589 req-8fd41988-1025-48dd-8f93-7a7726d7838d service nova] [instance: 5f57389d-853e-4439-872a-8345664578d0] Refreshing instance network info cache due to event network-changed-b044227a-1c18-44a3-8e27-70c39a8f7efa. 
{{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 982.739686] env[62740]: DEBUG oslo_concurrency.lockutils [req-78cb9888-337a-4abc-ac67-2da4c4f39589 req-8fd41988-1025-48dd-8f93-7a7726d7838d service nova] Acquiring lock "refresh_cache-5f57389d-853e-4439-872a-8345664578d0" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 982.739686] env[62740]: DEBUG oslo_concurrency.lockutils [req-78cb9888-337a-4abc-ac67-2da4c4f39589 req-8fd41988-1025-48dd-8f93-7a7726d7838d service nova] Acquired lock "refresh_cache-5f57389d-853e-4439-872a-8345664578d0" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.739686] env[62740]: DEBUG nova.network.neutron [req-78cb9888-337a-4abc-ac67-2da4c4f39589 req-8fd41988-1025-48dd-8f93-7a7726d7838d service nova] [instance: 5f57389d-853e-4439-872a-8345664578d0] Refreshing network info cache for port b044227a-1c18-44a3-8e27-70c39a8f7efa {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 982.739686] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-78cb9888-337a-4abc-ac67-2da4c4f39589 req-8fd41988-1025-48dd-8f93-7a7726d7838d service nova] Expecting reply to msg 21b712d0691d41cf9a05af0b6c65a440 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 982.747103] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 21b712d0691d41cf9a05af0b6c65a440 [ 983.290552] env[62740]: DEBUG nova.network.neutron [req-78cb9888-337a-4abc-ac67-2da4c4f39589 req-8fd41988-1025-48dd-8f93-7a7726d7838d service nova] [instance: 5f57389d-853e-4439-872a-8345664578d0] Updated VIF entry in instance network info cache for port b044227a-1c18-44a3-8e27-70c39a8f7efa. 
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 983.290942] env[62740]: DEBUG nova.network.neutron [req-78cb9888-337a-4abc-ac67-2da4c4f39589 req-8fd41988-1025-48dd-8f93-7a7726d7838d service nova] [instance: 5f57389d-853e-4439-872a-8345664578d0] Updating instance_info_cache with network_info: [{"id": "b044227a-1c18-44a3-8e27-70c39a8f7efa", "address": "fa:16:3e:27:c3:87", "network": {"id": "f10f5770-f866-413b-86ce-20c3a1473482", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1756365504-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f735ac36a0d46269560f1209706fb69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb044227a-1c", "ovs_interfaceid": "b044227a-1c18-44a3-8e27-70c39a8f7efa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.291602] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-78cb9888-337a-4abc-ac67-2da4c4f39589 req-8fd41988-1025-48dd-8f93-7a7726d7838d service nova] Expecting reply to msg 9c191ce3e3944a229756e4f680db815d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 983.302872] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9c191ce3e3944a229756e4f680db815d [ 983.303558] env[62740]: DEBUG oslo_concurrency.lockutils [req-78cb9888-337a-4abc-ac67-2da4c4f39589 req-8fd41988-1025-48dd-8f93-7a7726d7838d service nova] Releasing lock "refresh_cache-5f57389d-853e-4439-872a-8345664578d0" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 989.283728] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-da5d0e5f-4cb5-4fdf-82de-6e3a122bb293 tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Expecting reply to msg 0f2a02a2920e4e2ba9e1de56a48de90b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 989.298993] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0f2a02a2920e4e2ba9e1de56a48de90b [ 989.298993] env[62740]: DEBUG oslo_concurrency.lockutils [None req-da5d0e5f-4cb5-4fdf-82de-6e3a122bb293 tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Acquiring lock "732da1c8-e83e-4dd7-96c2-dbfa9468baab" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.417089] env[62740]: WARNING oslo_vmware.rw_handles [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote
end closed connection without response [ 989.417089] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 989.417089] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 989.417089] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 989.417089] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 989.417089] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 989.417089] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 989.417089] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 989.417089] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 989.417089] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 989.417089] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 989.417089] env[62740]: ERROR oslo_vmware.rw_handles [ 989.417089] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/7a6afe2b-c90b-417b-b977-8c542d8eddae/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore1 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 989.417856] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 989.418134] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Copying Virtual Disk [datastore1] vmware_temp/7a6afe2b-c90b-417b-b977-8c542d8eddae/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore1] vmware_temp/7a6afe2b-c90b-417b-b977-8c542d8eddae/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 989.418836] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-06b326bd-4fd7-4170-84a8-b8d0f13f074b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.427032] env[62740]: DEBUG oslo_vmware.api [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Waiting for the task: (returnval){ [ 989.427032] env[62740]: value = "task-640168" [ 989.427032] env[62740]: _type = "Task" [ 989.427032] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.435441] env[62740]: DEBUG oslo_vmware.api [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Task: {'id': task-640168, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.940772] env[62740]: DEBUG oslo_vmware.exceptions [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Fault InvalidArgument not matched. {{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 989.941211] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Releasing lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 989.942036] env[62740]: ERROR nova.compute.manager [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 989.942036] env[62740]: Faults: ['InvalidArgument'] [ 989.942036] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Traceback (most recent call last): [ 989.942036] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 989.942036] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] yield resources [ 989.942036] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 989.942036] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] self.driver.spawn(context, instance, image_meta, [ 989.942036] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 989.942036] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 989.942036] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 989.942036] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] self._fetch_image_if_missing(context, vi) [ 989.942036] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 989.942465] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] image_cache(vi, tmp_image_ds_loc) [ 989.942465] env[62740]: ERROR nova.compute.manager [instance: 
26712c18-d9f4-4d7d-80fb-4d527da9c1e3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 989.942465] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] vm_util.copy_virtual_disk( [ 989.942465] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 989.942465] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] session._wait_for_task(vmdk_copy_task) [ 989.942465] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 989.942465] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] return self.wait_for_task(task_ref) [ 989.942465] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 989.942465] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] return evt.wait() [ 989.942465] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 989.942465] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] result = hub.switch() [ 989.942465] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 989.942465] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] return self.greenlet.switch() [ 989.942797] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 989.942797] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] self.f(*self.args, **self.kw) [ 989.942797] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 989.942797] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] raise exceptions.translate_fault(task_info.error) [ 989.942797] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 989.942797] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Faults: ['InvalidArgument'] [ 989.942797] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] [ 989.942797] env[62740]: INFO nova.compute.manager [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Terminating instance [ 989.944123] env[62740]: DEBUG oslo_concurrency.lockutils [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 989.944339] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 989.944969] env[62740]: DEBUG nova.compute.manager [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 989.945429] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 989.945676] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-de883c66-bd07-4d50-a635-f08fec6bb5af {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.948149] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-362eb67d-f923-45e2-9e5b-e1c7af0d45c1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.956940] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 989.957194] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e3152a21-1f44-4bfd-bd4e-662d4dbb65c0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.959551] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 989.959731] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 989.960738] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a6fe143-7291-4a1d-9401-d30040cdbc5f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.965477] env[62740]: DEBUG oslo_vmware.api [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Waiting for the task: (returnval){ [ 989.965477] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]529382a5-cf39-9e81-762e-dc854c51ecf7" [ 989.965477] env[62740]: _type = "Task" [ 989.965477] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.982241] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 989.982241] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Creating directory with path [datastore1] vmware_temp/07172579-1799-400d-9f98-bf63e00a4b86/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 989.982241] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3b89ad0d-d720-4f4a-b297-c5ac0ffa68c8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.001710] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Created directory with path [datastore1] vmware_temp/07172579-1799-400d-9f98-bf63e00a4b86/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 990.001931] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Fetch image to [datastore1] vmware_temp/07172579-1799-400d-9f98-bf63e00a4b86/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 990.002119] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore1] vmware_temp/07172579-1799-400d-9f98-bf63e00a4b86/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore1 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 990.002994] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51fbf753-7581-4215-95c6-6be1b175da8f {{(pid=62740) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.010495] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7b6c3e3-43ea-4ffd-9983-6ca2a9fc805b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.019638] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e284e1c-d918-4ab8-bca5-7889803a9dde {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.056021] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0993d8cc-d050-4dbb-94a3-2f90b2dd4f06 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.056411] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 990.056618] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Deleting contents of the VM from datastore datastore1 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 990.056796] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Deleting the datastore file [datastore1] 26712c18-d9f4-4d7d-80fb-4d527da9c1e3 {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 990.057043] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6dfaeff5-ebe8-404a-8cc7-b5f3be92348e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.062239] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-de8cc1c1-5c6a-479d-893b-14ab3d39403c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.065173] env[62740]: DEBUG oslo_vmware.api [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Waiting for the task: (returnval){ [ 990.065173] env[62740]: value = "task-640170" [ 990.065173] env[62740]: _type = "Task" [ 990.065173] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.072652] env[62740]: DEBUG oslo_vmware.api [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Task: {'id': task-640170, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.087882] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore1 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 990.156882] env[62740]: DEBUG oslo_vmware.rw_handles [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/07172579-1799-400d-9f98-bf63e00a4b86/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 990.218307] env[62740]: DEBUG oslo_vmware.rw_handles [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Completed reading data from the image iterator. {{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 990.218516] env[62740]: DEBUG oslo_vmware.rw_handles [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/07172579-1799-400d-9f98-bf63e00a4b86/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 990.576379] env[62740]: DEBUG oslo_vmware.api [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Task: {'id': task-640170, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072295} completed successfully. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.576711] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 990.576822] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Deleted contents of the VM from datastore datastore1 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 990.577012] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 990.577202] env[62740]: INFO nova.compute.manager [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Took 0.63 seconds to destroy the instance on the hypervisor. [ 990.579327] env[62740]: DEBUG nova.compute.claims [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 990.579506] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 990.579723] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 990.581814] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg cc6d4baec29844398405e4390a9067c3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 990.625781] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cc6d4baec29844398405e4390a9067c3 [ 990.920419] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94c0812d-5dfa-4e7b-8a2b-8011b189ee31 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.929421] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57703450-fe7a-420a-8143-1ecd60dfe9ea 
{{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.958979] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dc0f195-2f86-4bf4-905d-1d9cb59bd3a2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.966007] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f615e0ba-eeb0-48b2-8ade-277905f4ecd1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.978741] env[62740]: DEBUG nova.compute.provider_tree [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 990.979268] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg db7d263527034fd4b43c348de45d8a1b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 990.987085] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg db7d263527034fd4b43c348de45d8a1b [ 990.988393] env[62740]: DEBUG nova.scheduler.client.report [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 990.991923] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 4aca7bec340a4794b3d67c97167d0f7d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 991.008028] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4aca7bec340a4794b3d67c97167d0f7d [ 991.008028] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.427s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.008028] env[62740]: ERROR nova.compute.manager [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 991.008028] env[62740]: Faults: 
['InvalidArgument'] [ 991.008028] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Traceback (most recent call last): [ 991.008028] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 991.008028] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] self.driver.spawn(context, instance, image_meta, [ 991.008028] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 991.008028] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 991.008028] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 991.008543] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] self._fetch_image_if_missing(context, vi) [ 991.008543] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 991.008543] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] image_cache(vi, tmp_image_ds_loc) [ 991.008543] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 991.008543] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] vm_util.copy_virtual_disk( [ 991.008543] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 991.008543] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] session._wait_for_task(vmdk_copy_task) [ 991.008543] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 991.008543] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] return self.wait_for_task(task_ref) [ 991.008543] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 991.008543] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] return evt.wait() [ 991.008543] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 991.008543] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] result = hub.switch() [ 991.009098] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 991.009098] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] return self.greenlet.switch() [ 991.009098] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 991.009098] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] self.f(*self.args, **self.kw) [ 991.009098] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 991.009098] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] raise exceptions.translate_fault(task_info.error) [ 991.009098] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 991.009098] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Faults: ['InvalidArgument'] [ 991.009098] env[62740]: ERROR nova.compute.manager [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] [ 991.009098] env[62740]: DEBUG nova.compute.utils [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 991.010406] env[62740]: DEBUG nova.compute.manager [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Build of instance 26712c18-d9f4-4d7d-80fb-4d527da9c1e3 was re-scheduled: A specified parameter was not correct: fileType [ 991.010406] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 991.010805] env[62740]: DEBUG nova.compute.manager [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 991.010981] env[62740]: DEBUG nova.compute.manager [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 991.011177] env[62740]: DEBUG nova.compute.manager [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 991.011346] env[62740]: DEBUG nova.network.neutron [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 991.363650] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg c61739e7b3f54a0a8aff35c25f817220 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 991.373790] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c61739e7b3f54a0a8aff35c25f817220 [ 991.374423] env[62740]: DEBUG nova.network.neutron [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 991.375013] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg ae5ecfec4b4f4a3b9ffd76ca0356f1f5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 991.387596] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae5ecfec4b4f4a3b9ffd76ca0356f1f5 [ 991.387596] env[62740]: INFO nova.compute.manager [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Took 0.37 seconds to deallocate network for instance. 
[ 991.388248] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg afc9a9048b954fd9a95f0258c1970c64 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 991.432024] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg afc9a9048b954fd9a95f0258c1970c64 [ 991.434648] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg be45209d17b747a48e1a4bc70011e42d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 991.469669] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be45209d17b747a48e1a4bc70011e42d [ 991.509300] env[62740]: INFO nova.scheduler.client.report [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Deleted allocations for instance 26712c18-d9f4-4d7d-80fb-4d527da9c1e3 [ 991.522020] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 1a467510754b4d7eaa6d85317206492d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 991.535021] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1a467510754b4d7eaa6d85317206492d [ 991.535802] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7e23f07e-a863-4056-9518-ea6da4f68c0f tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Lock "26712c18-d9f4-4d7d-80fb-4d527da9c1e3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 372.113s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.536399] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg 19b12af38e7b4acb9f546580038c0bf1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 991.539083] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ab30e142-d8fc-4521-8542-6420364e7a51 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Lock "26712c18-d9f4-4d7d-80fb-4d527da9c1e3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 174.460s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.539083] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ab30e142-d8fc-4521-8542-6420364e7a51 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Acquiring lock "26712c18-d9f4-4d7d-80fb-4d527da9c1e3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.539083] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ab30e142-d8fc-4521-8542-6420364e7a51 tempest-AttachInterfacesTestJSON-805621508 
tempest-AttachInterfacesTestJSON-805621508-project-member] Lock "26712c18-d9f4-4d7d-80fb-4d527da9c1e3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.539831] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ab30e142-d8fc-4521-8542-6420364e7a51 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Lock "26712c18-d9f4-4d7d-80fb-4d527da9c1e3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.541336] env[62740]: INFO nova.compute.manager [None req-ab30e142-d8fc-4521-8542-6420364e7a51 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Terminating instance [ 991.542086] env[62740]: DEBUG nova.compute.manager [None req-ab30e142-d8fc-4521-8542-6420364e7a51 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 991.542280] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-ab30e142-d8fc-4521-8542-6420364e7a51 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 991.542758] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8beeddb3-75e9-41e2-b93d-3ba530c33615 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.552886] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d99b402-77ed-4369-9486-ce5c11eac40f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.564713] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 19b12af38e7b4acb9f546580038c0bf1 [ 991.565286] env[62740]: DEBUG nova.compute.manager [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 991.566962] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg a8e738d704ea40fe84e41d1722e8aa83 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 991.587289] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-ab30e142-d8fc-4521-8542-6420364e7a51 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 26712c18-d9f4-4d7d-80fb-4d527da9c1e3 could not be found. 
[ 991.587289] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-ab30e142-d8fc-4521-8542-6420364e7a51 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 991.587289] env[62740]: INFO nova.compute.manager [None req-ab30e142-d8fc-4521-8542-6420364e7a51 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Took 0.04 seconds to destroy the instance on the hypervisor. [ 991.587289] env[62740]: DEBUG oslo.service.loopingcall [None req-ab30e142-d8fc-4521-8542-6420364e7a51 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 991.587289] env[62740]: DEBUG nova.compute.manager [-] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 991.587802] env[62740]: DEBUG nova.network.neutron [-] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 991.599200] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a8e738d704ea40fe84e41d1722e8aa83 [ 991.611805] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 47c3d8aa75104741807a5953ad46dfbf in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 991.619016] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.619258] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.620714] env[62740]: INFO nova.compute.claims [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 991.622297] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg 561f35bf0a87430fad02867bbf33f969 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 991.623379] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 47c3d8aa75104741807a5953ad46dfbf [ 991.623717] env[62740]: DEBUG nova.network.neutron [-] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Updating 
instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 991.624349] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 552c8848752e43bdbdfcf89187567e27 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 991.631843] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 552c8848752e43bdbdfcf89187567e27 [ 991.632278] env[62740]: INFO nova.compute.manager [-] [instance: 26712c18-d9f4-4d7d-80fb-4d527da9c1e3] Took 0.05 seconds to deallocate network for instance. [ 991.635611] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ab30e142-d8fc-4521-8542-6420364e7a51 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg e3ef07daf7394cebb2d3b3264c35f9d2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 991.681745] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 561f35bf0a87430fad02867bbf33f969 [ 991.682582] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e3ef07daf7394cebb2d3b3264c35f9d2 [ 991.684384] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg 2711e325f14f468ea2bc3c8a73efdd07 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 991.693040] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2711e325f14f468ea2bc3c8a73efdd07 [ 991.700171] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ab30e142-d8fc-4521-8542-6420364e7a51 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 2ebdd0c0d7e745599127c4e73aa39ccf in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 991.743461] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ebdd0c0d7e745599127c4e73aa39ccf [ 991.749067] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ab30e142-d8fc-4521-8542-6420364e7a51 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Lock "26712c18-d9f4-4d7d-80fb-4d527da9c1e3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.212s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.749401] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ab30e142-d8fc-4521-8542-6420364e7a51 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 09813352a0c9432088fb2372e648f486 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 991.766480] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 09813352a0c9432088fb2372e648f486 [ 991.981231] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-128cb179-196e-4248-917a-28223ad3a018 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.988864] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10e41a86-fd7a-4ca7-9a42-8b849ae896fb {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
992.019030] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a704c0b6-c815-432f-a2b5-768edcc32bd4 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.025946] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85db3ffc-d972-4873-bc3b-2676af5832c7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.039017] env[62740]: DEBUG nova.compute.provider_tree [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 992.039528] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg dc0f36ed7f3f43149e9b229187a3d67c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 992.049117] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dc0f36ed7f3f43149e9b229187a3d67c [ 992.049117] env[62740]: DEBUG nova.scheduler.client.report [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 992.050972] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg 2242484409eb457c92cfefeec48abdb7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 992.063219] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2242484409eb457c92cfefeec48abdb7 [ 992.063938] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.445s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.064424] env[62740]: DEBUG nova.compute.manager [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Start building networks asynchronously for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 992.066064] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg 81d0d2331b2040fab36811b123a29a14 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 992.103404] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 81d0d2331b2040fab36811b123a29a14 [ 992.105082] env[62740]: DEBUG nova.compute.utils [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 992.105480] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg 4b5b29c5c753405db1b030b54009fddf in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 992.106382] env[62740]: DEBUG nova.compute.manager [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 992.106598] env[62740]: DEBUG nova.network.neutron [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 992.114684] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4b5b29c5c753405db1b030b54009fddf [ 992.115212] env[62740]: DEBUG nova.compute.manager [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Start building block device mappings for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 992.116852] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg 4f30c8dcf3aa485db125474b37127e2a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 992.145526] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4f30c8dcf3aa485db125474b37127e2a [ 992.148383] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg 92a374cc83094e649b9408a4048d2f3f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 992.178051] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 92a374cc83094e649b9408a4048d2f3f [ 992.178868] env[62740]: DEBUG nova.compute.manager [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Start spawning the instance on the hypervisor. 
{{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 992.202407] env[62740]: DEBUG nova.policy [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8a9c404870f54f2e8d4d7c7b0df334f2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f38588bc783140a38d77f8967add27d0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 992.205571] env[62740]: DEBUG nova.virt.hardware [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:31:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='91a56551-3d63-446b-ad4a-8ded67a00313',id=38,is_public=True,memory_mb=128,name='tempest-test_resize_flavor_-1173285169',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 992.205939] env[62740]: DEBUG nova.virt.hardware [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 992.206204] env[62740]: DEBUG nova.virt.hardware [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 992.206523] env[62740]: DEBUG nova.virt.hardware [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 992.206744] env[62740]: DEBUG nova.virt.hardware [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 992.206998] env[62740]: DEBUG nova.virt.hardware [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 992.207298] env[62740]: 
DEBUG nova.virt.hardware [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 992.207531] env[62740]: DEBUG nova.virt.hardware [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 992.207767] env[62740]: DEBUG nova.virt.hardware [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 992.207952] env[62740]: DEBUG nova.virt.hardware [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 992.208147] env[62740]: DEBUG nova.virt.hardware [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 992.209363] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42065e49-54a2-4135-a86d-d8e5689760c3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.217507] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ca58bb0-0693-4ed4-a365-b3baef56efa4 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.594822] env[62740]: DEBUG nova.network.neutron [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Successfully created port: db31340c-a799-465b-80f0-8379aac78e88 {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 993.350225] env[62740]: DEBUG nova.network.neutron [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Successfully updated port: db31340c-a799-465b-80f0-8379aac78e88 {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 993.350744] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg 7c1824bccae34263953f7d343b14c7de in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 993.365304] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c1824bccae34263953f7d343b14c7de [ 993.366262] env[62740]: DEBUG oslo_concurrency.lockutils [None 
req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Acquiring lock "refresh_cache-472cd209-4192-4473-b788-d1ea342653bf" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 993.366506] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Acquired lock "refresh_cache-472cd209-4192-4473-b788-d1ea342653bf" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.366947] env[62740]: DEBUG nova.network.neutron [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 993.367774] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg d8cee06d6c164035b19a70dc265d7643 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 993.376148] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d8cee06d6c164035b19a70dc265d7643 [ 993.433795] env[62740]: DEBUG nova.network.neutron [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 993.617091] env[62740]: DEBUG nova.compute.manager [req-06f8d1fd-5944-4556-84a7-629e7a684857 req-aee0b421-c660-4c5d-9cd8-95013811c4d4 service nova] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Received event network-vif-plugged-db31340c-a799-465b-80f0-8379aac78e88 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 993.617393] env[62740]: DEBUG oslo_concurrency.lockutils [req-06f8d1fd-5944-4556-84a7-629e7a684857 req-aee0b421-c660-4c5d-9cd8-95013811c4d4 service nova] Acquiring lock "472cd209-4192-4473-b788-d1ea342653bf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 993.618032] env[62740]: DEBUG oslo_concurrency.lockutils [req-06f8d1fd-5944-4556-84a7-629e7a684857 req-aee0b421-c660-4c5d-9cd8-95013811c4d4 service nova] Lock "472cd209-4192-4473-b788-d1ea342653bf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 993.618410] env[62740]: DEBUG oslo_concurrency.lockutils [req-06f8d1fd-5944-4556-84a7-629e7a684857 req-aee0b421-c660-4c5d-9cd8-95013811c4d4 service nova] Lock "472cd209-4192-4473-b788-d1ea342653bf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.618626] env[62740]: DEBUG nova.compute.manager [req-06f8d1fd-5944-4556-84a7-629e7a684857 
req-aee0b421-c660-4c5d-9cd8-95013811c4d4 service nova] [instance: 472cd209-4192-4473-b788-d1ea342653bf] No waiting events found dispatching network-vif-plugged-db31340c-a799-465b-80f0-8379aac78e88 {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 993.618802] env[62740]: WARNING nova.compute.manager [req-06f8d1fd-5944-4556-84a7-629e7a684857 req-aee0b421-c660-4c5d-9cd8-95013811c4d4 service nova] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Received unexpected event network-vif-plugged-db31340c-a799-465b-80f0-8379aac78e88 for instance with vm_state building and task_state spawning. [ 993.618969] env[62740]: DEBUG nova.compute.manager [req-06f8d1fd-5944-4556-84a7-629e7a684857 req-aee0b421-c660-4c5d-9cd8-95013811c4d4 service nova] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Received event network-changed-db31340c-a799-465b-80f0-8379aac78e88 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 993.619140] env[62740]: DEBUG nova.compute.manager [req-06f8d1fd-5944-4556-84a7-629e7a684857 req-aee0b421-c660-4c5d-9cd8-95013811c4d4 service nova] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Refreshing instance network info cache due to event network-changed-db31340c-a799-465b-80f0-8379aac78e88. {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 993.619312] env[62740]: DEBUG oslo_concurrency.lockutils [req-06f8d1fd-5944-4556-84a7-629e7a684857 req-aee0b421-c660-4c5d-9cd8-95013811c4d4 service nova] Acquiring lock "refresh_cache-472cd209-4192-4473-b788-d1ea342653bf" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 993.681986] env[62740]: DEBUG nova.network.neutron [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Updating instance_info_cache with network_info: [{"id": "db31340c-a799-465b-80f0-8379aac78e88", "address": "fa:16:3e:dc:d9:1b", "network": {"id": "304597cd-4bd9-403c-8f5b-990e2a1efabc", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.223", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "47f0062f3bf04910bbbb3502a2f3ff28", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb31340c-a7", "ovs_interfaceid": "db31340c-a799-465b-80f0-8379aac78e88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.681986] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg ef145e87bc3049749c855482b4cf00e9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 993.693665] 
env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ef145e87bc3049749c855482b4cf00e9 [ 993.694276] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Releasing lock "refresh_cache-472cd209-4192-4473-b788-d1ea342653bf" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 993.694536] env[62740]: DEBUG nova.compute.manager [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Instance network_info: |[{"id": "db31340c-a799-465b-80f0-8379aac78e88", "address": "fa:16:3e:dc:d9:1b", "network": {"id": "304597cd-4bd9-403c-8f5b-990e2a1efabc", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.223", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "47f0062f3bf04910bbbb3502a2f3ff28", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb31340c-a7", "ovs_interfaceid": "db31340c-a799-465b-80f0-8379aac78e88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 993.694820] env[62740]: DEBUG oslo_concurrency.lockutils [req-06f8d1fd-5944-4556-84a7-629e7a684857 req-aee0b421-c660-4c5d-9cd8-95013811c4d4 service nova] Acquired lock "refresh_cache-472cd209-4192-4473-b788-d1ea342653bf" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.694998] env[62740]: DEBUG nova.network.neutron [req-06f8d1fd-5944-4556-84a7-629e7a684857 req-aee0b421-c660-4c5d-9cd8-95013811c4d4 service nova] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Refreshing network info cache for port db31340c-a799-465b-80f0-8379aac78e88 {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 993.695407] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-06f8d1fd-5944-4556-84a7-629e7a684857 req-aee0b421-c660-4c5d-9cd8-95013811c4d4 service nova] Expecting reply to msg 2b15985dc2654b02b58504d4421cf058 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 993.700255] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dc:d9:1b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '39ab9baf-90cd-4fe2-8d56-434f8210fc19', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'db31340c-a799-465b-80f0-8379aac78e88', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 993.710785] env[62740]: DEBUG oslo.service.loopingcall [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 993.711591] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b15985dc2654b02b58504d4421cf058 [ 993.714571] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 993.715353] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-baf0f1ef-236d-4e79-a64b-d3275b1bf361 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.735963] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 993.735963] env[62740]: value = "task-640171" [ 993.735963] env[62740]: _type = "Task" [ 993.735963] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.743376] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640171, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.247572] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640171, 'name': CreateVM_Task, 'duration_secs': 0.310754} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.250528] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 994.251180] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 994.251441] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Acquired lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 994.251650] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 994.252238] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7f0de5b-dac0-4548-9925-7882bd96a29d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.256893] env[62740]: DEBUG 
oslo_vmware.api [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Waiting for the task: (returnval){ [ 994.256893] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]526836cd-c01c-6ab9-32c2-21b581019a9d" [ 994.256893] env[62740]: _type = "Task" [ 994.256893] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.265395] env[62740]: DEBUG oslo_vmware.api [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]526836cd-c01c-6ab9-32c2-21b581019a9d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.461149] env[62740]: DEBUG nova.network.neutron [req-06f8d1fd-5944-4556-84a7-629e7a684857 req-aee0b421-c660-4c5d-9cd8-95013811c4d4 service nova] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Updated VIF entry in instance network info cache for port db31340c-a799-465b-80f0-8379aac78e88. {{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 994.461525] env[62740]: DEBUG nova.network.neutron [req-06f8d1fd-5944-4556-84a7-629e7a684857 req-aee0b421-c660-4c5d-9cd8-95013811c4d4 service nova] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Updating instance_info_cache with network_info: [{"id": "db31340c-a799-465b-80f0-8379aac78e88", "address": "fa:16:3e:dc:d9:1b", "network": {"id": "304597cd-4bd9-403c-8f5b-990e2a1efabc", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.223", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "47f0062f3bf04910bbbb3502a2f3ff28", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb31340c-a7", "ovs_interfaceid": "db31340c-a799-465b-80f0-8379aac78e88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.462300] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-06f8d1fd-5944-4556-84a7-629e7a684857 req-aee0b421-c660-4c5d-9cd8-95013811c4d4 service nova] Expecting reply to msg 815d8c24e1d04404b36c0bbb31197969 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 994.472325] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 815d8c24e1d04404b36c0bbb31197969 [ 994.473030] env[62740]: DEBUG oslo_concurrency.lockutils [req-06f8d1fd-5944-4556-84a7-629e7a684857 req-aee0b421-c660-4c5d-9cd8-95013811c4d4 service nova] Releasing lock "refresh_cache-472cd209-4192-4473-b788-d1ea342653bf" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 994.768459] env[62740]: DEBUG 
oslo_concurrency.lockutils [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Releasing lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 994.768459] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 994.768812] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 995.437658] env[62740]: DEBUG oslo_concurrency.lockutils [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Acquiring lock "b0b16f66-8dbc-4e9b-a932-5de45215cfff" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.437897] env[62740]: DEBUG oslo_concurrency.lockutils [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Lock "b0b16f66-8dbc-4e9b-a932-5de45215cfff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.752494] env[62740]: DEBUG oslo_concurrency.lockutils [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquiring lock "56106517-e735-4bf5-8d5a-dc0d4aab3991" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.752925] env[62740]: DEBUG oslo_concurrency.lockutils [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "56106517-e735-4bf5-8d5a-dc0d4aab3991" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1005.890284] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1006.891550] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task 
ComputeManager._instance_usage_audit {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1006.891550] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1006.891550] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager.update_available_resource {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1006.891550] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg d07a2d0705514eabb4106a7aeed27b25 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1006.905178] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d07a2d0705514eabb4106a7aeed27b25 [ 1006.906276] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1006.906480] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1006.906660] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1006.906821] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62740) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1006.907928] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16512d1b-56a7-46da-9703-65b6cdf39c9d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.916921] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d3a2431-ee1e-4c39-9a0e-84c7d0edf97a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.930840] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13005325-b6e5-4eee-a612-4c8491bb6452 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.937271] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-228f1284-72f0-433d-9176-66c8bee2a845 {{(pid=62740) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.967482] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181678MB free_disk=90GB free_vcpus=48 pci_devices=None {{(pid=62740) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1006.968675] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1006.968675] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1006.968675] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 48e8484883854ed5ad7e6aa49cde67a3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1007.028252] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 48e8484883854ed5ad7e6aa49cde67a3 [ 1007.032947] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg ce0aa7a8e3884b5099c87b0e6c1cb51b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1007.045052] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ce0aa7a8e3884b5099c87b0e6c1cb51b [ 1007.079190] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 8053d2ae-ca61-4282-aa89-83f3a2e107bc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1007.080050] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance a24df1e4-2865-4ab3-beae-0892dca12bef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1007.080050] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 68aa9321-22ce-45a0-8323-fa8564dca46b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1007.080050] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 00085df9-ce61-4ccc-8ecf-16956109eb8f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1007.080050] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 697e3884-2ef4-423e-af81-e5d1e94f65a2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1007.080312] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance f22357ec-450c-4545-8822-74b83bfc5a35 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1007.080312] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 732da1c8-e83e-4dd7-96c2-dbfa9468baab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1007.080312] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 388a39df-9fa9-4153-9f3c-4ad94fd5edfb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1007.080312] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 5f57389d-853e-4439-872a-8345664578d0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1007.080470] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 472cd209-4192-4473-b788-d1ea342653bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1007.080904] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 7fbf889c6f08428a8f01cecc65b990bb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1007.095789] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7fbf889c6f08428a8f01cecc65b990bb [ 1007.096614] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance d8dac9af-0897-4fbf-8ee6-1fb3955d48c0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1007.097149] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg effbe419a31d43ffa299b5d6d9e973ff in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1007.106984] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg effbe419a31d43ffa299b5d6d9e973ff [ 1007.108802] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 2162ea32-6407-4286-9340-b62a9ec0988e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1007.108802] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 153724669e264f40a0c3f63d724fe63c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1007.118352] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 153724669e264f40a0c3f63d724fe63c [ 1007.119027] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance d2669ffb-41b1-474d-bb7a-fabae11e69d0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1007.119556] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg f0460b021a9b4e39a301a845e75a1a9e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1007.129210] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f0460b021a9b4e39a301a845e75a1a9e [ 1007.129893] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance f7574228-f7fc-4ab0-9a38-7671046d46a9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1007.130444] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg a50225b3414745e787b519616a865ff4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1007.139575] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a50225b3414745e787b519616a865ff4 [ 1007.140202] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance b1b86050-2bb1-443b-967b-12531d71ba04 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1007.140687] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 742d85c3f7024b30b4a22541a58a0d24 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1007.149379] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 742d85c3f7024b30b4a22541a58a0d24 [ 1007.149991] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 285dcc26-a4e9-40bc-82dd-37931f46e7fe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1007.150447] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg d3a16a8fce4546d7ac4f2d2823117965 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1007.159066] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d3a16a8fce4546d7ac4f2d2823117965 [ 1007.159670] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance b0995d6c-a700-47a3-a39d-6a6fe1462042 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1007.160122] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg ef5bd4493131494cb98f3911398fac11 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1007.168945] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ef5bd4493131494cb98f3911398fac11 [ 1007.169592] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance d6c3ca16-5c7c-41e6-9850-10221603ad2a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1007.170062] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 1aeb79968c644539bdd09c931bc524b1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1007.178712] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1aeb79968c644539bdd09c931bc524b1 [ 1007.179355] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance d60c8e65-1eb3-4017-b28e-8b72b0b4b2e1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1007.179820] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg bcdb53fa293244e9922c8266809411cb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1007.188753] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bcdb53fa293244e9922c8266809411cb [ 1007.189401] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 6531eee8-d8ec-4a9d-911c-d7d9b88baf19 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1007.189880] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg b4ad8312899f4c85a72e915653b33b58 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1007.202030] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4ad8312899f4c85a72e915653b33b58 [ 1007.202470] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 158406db-7196-4826-aefa-20a58daa186b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1007.202944] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 5915d5583f2845209f967170e0fd1c1b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1007.213921] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5915d5583f2845209f967170e0fd1c1b [ 1007.214744] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance cf00af51-2b31-4b99-a692-8b0851dd74b8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1007.215418] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg bf81ef94d1cb48be910ca871caa0ea39 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1007.226409] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bf81ef94d1cb48be910ca871caa0ea39 [ 1007.226409] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 6ca702af-1a5c-40bb-b6c7-2f55ca308c02 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1007.226573] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 0c3370934dab43ebb85bdf87a00da34f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1007.235676] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c3370934dab43ebb85bdf87a00da34f [ 1007.236344] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance e21a5624-20ca-45d8-a0bf-dd87cec1c701 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1007.236867] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg f4560f9739e6429e826d0e0d2eb4c5f6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1007.246799] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f4560f9739e6429e826d0e0d2eb4c5f6 [ 1007.248375] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance b0b16f66-8dbc-4e9b-a932-5de45215cfff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1007.248375] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 6d57ae48e873460db829062c8b13b042 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1007.257348] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6d57ae48e873460db829062c8b13b042 [ 1007.258064] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 56106517-e735-4bf5-8d5a-dc0d4aab3991 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1007.258391] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1007.258559] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1007.675337] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-790e526d-0587-40b1-bc80-049097d6497d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.684815] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62d73ace-1b03-4d6a-9e05-9fdbd67b14cc {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.716767] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40fa67c5-a4ef-4369-87af-b36a350e1268 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.724299] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-788ebbe3-d8c7-4b02-8122-bdf78310b243 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.738137] env[62740]: DEBUG nova.compute.provider_tree [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1007.738844] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 9f5e770ae31e4b3b9a7b4cf16d4f56de in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1007.750022] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f5e770ae31e4b3b9a7b4cf16d4f56de [ 1007.751132] env[62740]: DEBUG nova.scheduler.client.report [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1007.753568] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 0bc68b5fbfab427b9678a88c22559bcd in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1007.770308] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0bc68b5fbfab427b9678a88c22559bcd [ 
1007.771235] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62740) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1007.771978] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.804s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.774152] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1008.774152] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Starting heal instance info cache {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 1008.774152] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Rebuilding the list of instances to heal {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 1008.774152] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 68cad72493e4479190f3a473f9d65c07 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1008.794022] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 68cad72493e4479190f3a473f9d65c07 [ 1008.796181] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1008.796361] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1008.796499] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1008.796630] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1008.796757] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Skipping network cache update for instance because it is Building. 
{{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1008.796879] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1008.797008] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1008.797137] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1008.797258] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 5f57389d-853e-4439-872a-8345664578d0] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1008.797407] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1008.797509] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Didn't find any instances for network info cache update. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 1008.890933] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1008.891194] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1008.892520] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg f31053cb66da4e49a07be97740cb1918 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1008.911829] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f31053cb66da4e49a07be97740cb1918 [ 1008.915321] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1008.916092] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62740) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 1009.891620] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1009.891940] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1010.177867] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 0b2b44fd6602438f88b236f588484d62 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1010.187719] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0b2b44fd6602438f88b236f588484d62 [ 1022.143191] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a8751302-ee3c-4f1d-b0dc-e986fd148f21 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg 707c4cabe0ab423484e6e7b682cb229f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1022.153792] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 707c4cabe0ab423484e6e7b682cb229f [ 1022.154298] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a8751302-ee3c-4f1d-b0dc-e986fd148f21 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Acquiring lock "388a39df-9fa9-4153-9f3c-4ad94fd5edfb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1023.754888] env[62740]: WARNING oslo_vmware.rw_handles [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1023.754888] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1023.754888] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1023.754888] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1023.754888] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1023.754888] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 1023.754888] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1023.754888] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1023.754888] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1023.754888] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1023.754888] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1023.754888] env[62740]: ERROR oslo_vmware.rw_handles [ 1023.755912] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-f1f41207-03cf-42b5-a170-2112dad4d81b 
tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/b12afd9e-db23-4a8f-a259-98e7e7fa2cf1/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1023.757306] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1023.757556] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Copying Virtual Disk [datastore2] vmware_temp/b12afd9e-db23-4a8f-a259-98e7e7fa2cf1/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore2] vmware_temp/b12afd9e-db23-4a8f-a259-98e7e7fa2cf1/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1023.757839] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d40e1d8f-ad9d-4d2c-8a8b-a6482e555dbf {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.765974] env[62740]: DEBUG oslo_vmware.api [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Waiting for the task: (returnval){ [ 1023.765974] env[62740]: value = "task-640172" [ 1023.765974] env[62740]: _type = "Task" [ 1023.765974] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.774073] env[62740]: DEBUG oslo_vmware.api [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Task: {'id': task-640172, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.276822] env[62740]: DEBUG oslo_vmware.exceptions [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Fault InvalidArgument not matched. 
{{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1024.277125] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1024.277679] env[62740]: ERROR nova.compute.manager [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1024.277679] env[62740]: Faults: ['InvalidArgument'] [ 1024.277679] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Traceback (most recent call last): [ 1024.277679] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1024.277679] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] yield resources [ 1024.277679] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1024.277679] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] self.driver.spawn(context, instance, image_meta, [ 1024.277679] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1024.277679] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1024.277679] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1024.277679] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] self._fetch_image_if_missing(context, vi) [ 1024.277679] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1024.278099] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] image_cache(vi, tmp_image_ds_loc) [ 1024.278099] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1024.278099] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] vm_util.copy_virtual_disk( [ 1024.278099] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1024.278099] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] session._wait_for_task(vmdk_copy_task) [ 1024.278099] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1024.278099] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] return self.wait_for_task(task_ref) [ 1024.278099] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1024.278099] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] return evt.wait() [ 1024.278099] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1024.278099] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] result = hub.switch() [ 1024.278099] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1024.278099] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] return self.greenlet.switch() [ 1024.278526] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1024.278526] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] self.f(*self.args, **self.kw) [ 1024.278526] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1024.278526] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] raise exceptions.translate_fault(task_info.error) [ 1024.278526] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1024.278526] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Faults: ['InvalidArgument'] [ 1024.278526] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] [ 1024.278526] env[62740]: INFO nova.compute.manager [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Terminating instance [ 1024.279986] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.280177] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1024.280433] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-cf9af556-88c9-494d-a34f-d22c20aead19 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.282873] env[62740]: DEBUG nova.compute.manager [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1024.283116] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1024.283938] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1e9b738-37bb-40de-98c9-9d83f0c5b561 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.291440] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1024.292556] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d7da2731-2a56-4a42-b3d6-691f0b99646e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.293968] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1024.294165] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1024.294828] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4fe2a2a4-f81a-4afa-aaff-309df0f2f9db {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.299860] env[62740]: DEBUG oslo_vmware.api [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Waiting for the task: (returnval){ [ 1024.299860] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5218bdc8-f11e-90ec-2cf2-641a494980e9" [ 1024.299860] env[62740]: _type = "Task" [ 1024.299860] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.307886] env[62740]: DEBUG oslo_vmware.api [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5218bdc8-f11e-90ec-2cf2-641a494980e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.357683] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1024.357808] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1024.357999] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Deleting the datastore file [datastore2] 8053d2ae-ca61-4282-aa89-83f3a2e107bc {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1024.358323] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-46c53196-d766-451a-b492-da146550c4e9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.365598] env[62740]: DEBUG oslo_vmware.api [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Waiting for the task: (returnval){ [ 1024.365598] env[62740]: value = "task-640174" [ 1024.365598] env[62740]: _type = "Task" [ 1024.365598] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.372614] env[62740]: DEBUG oslo_vmware.api [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Task: {'id': task-640174, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.810919] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1024.810919] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Creating directory with path [datastore2] vmware_temp/a580d5e3-c768-4e6d-8ac3-a01fe42675e2/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1024.810919] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-98365848-67cf-4fa7-9398-cdddfb9d6cc5 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.822813] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Created directory with path [datastore2] vmware_temp/a580d5e3-c768-4e6d-8ac3-a01fe42675e2/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1024.823022] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Fetch image to [datastore2] vmware_temp/a580d5e3-c768-4e6d-8ac3-a01fe42675e2/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1024.823197] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/a580d5e3-c768-4e6d-8ac3-a01fe42675e2/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1024.823954] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dc9bf48-1641-41a2-bc7d-ad38529f0056 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.830584] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdf383bd-5994-4d17-b6c2-41c7a96fd861 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.839546] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-894d06b0-23a5-48af-be82-808bab1a0fbb {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.873179] env[62740]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53a563e3-5006-4676-9a97-1bb917bc401d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.880090] env[62740]: DEBUG oslo_vmware.api [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Task: {'id': task-640174, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074428} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.881519] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1024.881707] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1024.881882] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1024.882066] env[62740]: INFO nova.compute.manager [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Took 0.60 seconds to destroy the instance on the hypervisor. 
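The traceback above pins the failure to one call chain: vm_util.copy_virtual_disk() submits CopyVirtualDisk_Task, session._wait_for_task() parks the greenthread on an eventlet event, and oslo.vmware's _poll_task() raises the exception translated from task_info.error. Because the 'InvalidArgument' fault matched no specific exception class ("Fault InvalidArgument not matched"), the generic VimFaultException escaped. The following is a minimal, hypothetical sketch of that polling contract, not oslo.vmware's actual code; the exception class, callable, and dict keys ("state", "faults", "message") are stand-ins chosen to mirror the records for task-640172:

    import time

    class VimFaultException(Exception):
        # Stand-in for oslo_vmware.exceptions.VimFaultException; the real
        # class also carries cause/details, omitted here for brevity.
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    def wait_for_task(poll_task_info, interval=0.5):
        # Keep polling the vCenter task until it leaves the running state;
        # on error, raise the fault translated from the task info, as the
        # _poll_task frames in the traceback above do.
        while True:
            info = poll_task_info()
            if info["state"] == "success":
                return info.get("result")
            if info["state"] == "error":
                raise VimFaultException(info["faults"], info["message"])
            time.sleep(interval)

    # Simulate the failure recorded for task-640172:
    states = iter([
        {"state": "running"},
        {"state": "error", "faults": ["InvalidArgument"],
         "message": "A specified parameter was not correct: fileType"},
    ])
    try:
        wait_for_task(lambda: next(states), interval=0)
    except VimFaultException as exc:
        print(exc, exc.fault_list)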
[ 1024.884813] env[62740]: DEBUG nova.compute.claims [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1024.885010] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1024.885235] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1024.887154] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg d8862a4a0e6e4a17b9ed3f4aa31b0822 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1024.888015] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6c9ebbb8-33a1-40b8-a1f8-478f5f20cdc9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.911064] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1024.927349] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d8862a4a0e6e4a17b9ed3f4aa31b0822 [ 1024.964014] env[62740]: DEBUG oslo_vmware.rw_handles [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a580d5e3-c768-4e6d-8ac3-a01fe42675e2/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1025.026741] env[62740]: DEBUG oslo_vmware.rw_handles [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Completed reading data from the image iterator. 
{{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1025.026973] env[62740]: DEBUG oslo_vmware.rw_handles [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a580d5e3-c768-4e6d-8ac3-a01fe42675e2/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1025.290475] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9500572-8f27-470b-b745-a6d9680d8232 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.298457] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7913d4fc-f1b3-4487-98c5-6bbc198e3f82 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.327440] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce8f50cb-54bb-4acb-ada5-85357b16384a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.334402] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b40048d7-b903-4f93-862b-f80020cf9148 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.347682] env[62740]: DEBUG nova.compute.provider_tree [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1025.348112] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg d4baae1b69c34065b516811a170d1a9c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1025.359697] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d4baae1b69c34065b516811a170d1a9c [ 1025.360645] env[62740]: DEBUG nova.scheduler.client.report [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1025.362979] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 
2480f7c8ad1540bd84f5f6d70113fca9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1025.374420] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2480f7c8ad1540bd84f5f6d70113fca9 [ 1025.376022] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.490s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.376022] env[62740]: ERROR nova.compute.manager [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1025.376022] env[62740]: Faults: ['InvalidArgument'] [ 1025.376022] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Traceback (most recent call last): [ 1025.376022] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1025.376022] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] self.driver.spawn(context, instance, image_meta, [ 1025.376022] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1025.376022] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1025.376022] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1025.376022] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] self._fetch_image_if_missing(context, vi) [ 1025.376361] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1025.376361] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] image_cache(vi, tmp_image_ds_loc) [ 1025.376361] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1025.376361] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] vm_util.copy_virtual_disk( [ 1025.376361] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1025.376361] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] session._wait_for_task(vmdk_copy_task) [ 1025.376361] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1025.376361] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] return self.wait_for_task(task_ref) [ 1025.376361] env[62740]: 
ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1025.376361] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] return evt.wait() [ 1025.376361] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1025.376361] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] result = hub.switch() [ 1025.376361] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1025.376665] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] return self.greenlet.switch() [ 1025.376665] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1025.376665] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] self.f(*self.args, **self.kw) [ 1025.376665] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1025.376665] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] raise exceptions.translate_fault(task_info.error) [ 1025.376665] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1025.376665] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Faults: ['InvalidArgument'] [ 1025.376665] env[62740]: ERROR nova.compute.manager [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] [ 1025.376665] env[62740]: DEBUG nova.compute.utils [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1025.377963] env[62740]: DEBUG nova.compute.manager [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Build of instance 8053d2ae-ca61-4282-aa89-83f3a2e107bc was re-scheduled: A specified parameter was not correct: fileType [ 1025.377963] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1025.379031] env[62740]: DEBUG nova.compute.manager [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1025.379031] env[62740]: DEBUG nova.compute.manager [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Virt driver does not provide 
unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1025.379031] env[62740]: DEBUG nova.compute.manager [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1025.379031] env[62740]: DEBUG nova.network.neutron [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1025.783287] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg f431e92de3c741d7ad1bbf7df217d288 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1025.795987] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f431e92de3c741d7ad1bbf7df217d288 [ 1025.796607] env[62740]: DEBUG nova.network.neutron [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1025.797108] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 438abb1a042243c4b70bcc475285e792 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1025.805939] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 438abb1a042243c4b70bcc475285e792 [ 1025.808275] env[62740]: INFO nova.compute.manager [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Took 0.43 seconds to deallocate network for instance.
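The records from "Failed to build and run instance" down to "Took 0.43 seconds to deallocate network for instance" trace Nova's re-schedule path: the spawn error propagates out of the driver, the resource claim is aborted under the compute_resources lock, and the network is deallocated (to an empty network_info, since no ports were ever bound) before the build is handed back to the scheduler. Below is a compressed, purely illustrative sketch of that control flow; every name is hypothetical and does not match Nova's real signatures, which record the fault and re-schedule via the conductor:

    class RescheduleRequired(Exception):
        # Hypothetical marker exception standing in for Nova's
        # re-schedule bookkeeping.
        def __init__(self, instance_uuid, reason):
            super().__init__(reason)
            self.instance_uuid = instance_uuid

    def build_and_run(instance_uuid, spawn, abort_claim, deallocate_network):
        # Failure path only: spawn raises, the claim is rolled back, the
        # network is deallocated, and the build is re-queued.
        try:
            spawn()
        except Exception as exc:
            abort_claim()
            deallocate_network()
            raise RescheduleRequired(instance_uuid, str(exc))

    def failing_spawn():
        raise RuntimeError("A specified parameter was not correct: fileType")

    try:
        build_and_run("8053d2ae-ca61-4282-aa89-83f3a2e107bc", failing_spawn,
                      abort_claim=lambda: print("claim aborted"),
                      deallocate_network=lambda: print("network deallocated"))
    except RescheduleRequired as exc:
        print("re-schedule:", exc.instance_uuid, "->", exc)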
[ 1025.808275] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 7aaedaad20fb44ad92ad0d77411bdf5a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1025.852654] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7aaedaad20fb44ad92ad0d77411bdf5a [ 1025.855358] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 07e3963ce3f144b0a9a33fca008127ce in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1025.885121] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07e3963ce3f144b0a9a33fca008127ce [ 1025.912076] env[62740]: INFO nova.scheduler.client.report [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Deleted allocations for instance 8053d2ae-ca61-4282-aa89-83f3a2e107bc [ 1025.918724] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 3b89d8ab587c4b6db54a6fd4d39c3726 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1025.934345] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3b89d8ab587c4b6db54a6fd4d39c3726 [ 1025.934959] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f1f41207-03cf-42b5-a170-2112dad4d81b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "8053d2ae-ca61-4282-aa89-83f3a2e107bc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 422.521s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.935518] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 1f31cf7e82a5418c8908fa680e9deb87 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1025.936277] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2d920103-d762-48b2-97f5-9576e1df6a73 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "8053d2ae-ca61-4282-aa89-83f3a2e107bc" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 221.614s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.936526] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2d920103-d762-48b2-97f5-9576e1df6a73 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquiring lock "8053d2ae-ca61-4282-aa89-83f3a2e107bc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.936740] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2d920103-d762-48b2-97f5-9576e1df6a73 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member]
Lock "8053d2ae-ca61-4282-aa89-83f3a2e107bc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.936907] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2d920103-d762-48b2-97f5-9576e1df6a73 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "8053d2ae-ca61-4282-aa89-83f3a2e107bc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.938879] env[62740]: INFO nova.compute.manager [None req-2d920103-d762-48b2-97f5-9576e1df6a73 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Terminating instance [ 1025.940623] env[62740]: DEBUG nova.compute.manager [None req-2d920103-d762-48b2-97f5-9576e1df6a73 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1025.940880] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-2d920103-d762-48b2-97f5-9576e1df6a73 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1025.941299] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fd9a4fd9-7544-4bb7-ba0e-583b78248600 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.950224] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ccba71a-f1dd-4e75-8f81-01b43d6262b2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.960488] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1f31cf7e82a5418c8908fa680e9deb87 [ 1025.960974] env[62740]: DEBUG nova.compute.manager [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1025.962573] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 5f7f4f19f29640738804d2d69020dcc8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1025.980153] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-2d920103-d762-48b2-97f5-9576e1df6a73 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8053d2ae-ca61-4282-aa89-83f3a2e107bc could not be found. 
[ 1025.980351] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-2d920103-d762-48b2-97f5-9576e1df6a73 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1025.980575] env[62740]: INFO nova.compute.manager [None req-2d920103-d762-48b2-97f5-9576e1df6a73 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1025.980865] env[62740]: DEBUG oslo.service.loopingcall [None req-2d920103-d762-48b2-97f5-9576e1df6a73 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1025.981105] env[62740]: DEBUG nova.compute.manager [-] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1025.981207] env[62740]: DEBUG nova.network.neutron [-] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1025.996071] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5f7f4f19f29640738804d2d69020dcc8 [ 1026.001914] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 3547e5eee2b04906b7b4a442e639482b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1026.008416] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3547e5eee2b04906b7b4a442e639482b [ 1026.008765] env[62740]: DEBUG nova.network.neutron [-] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1026.009149] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 30305ed6ec8d4a32b829afd92c3174e1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1026.011505] env[62740]: DEBUG oslo_concurrency.lockutils [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1026.011748] env[62740]: DEBUG oslo_concurrency.lockutils [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1026.013230] env[62740]: INFO nova.compute.claims [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Claim successful on node
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1026.014775] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 5f2617ba3dc844c9a9817a7d988d02ca in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1026.017150] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30305ed6ec8d4a32b829afd92c3174e1 [ 1026.017578] env[62740]: INFO nova.compute.manager [-] [instance: 8053d2ae-ca61-4282-aa89-83f3a2e107bc] Took 0.04 seconds to deallocate network for instance. [ 1026.021267] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2d920103-d762-48b2-97f5-9576e1df6a73 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 249067c40f304606a82a9c5f995b320e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1026.055033] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 249067c40f304606a82a9c5f995b320e [ 1026.068629] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2d920103-d762-48b2-97f5-9576e1df6a73 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 9ea8ea0613f64760bc0b21113f79a0af in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1026.070494] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5f2617ba3dc844c9a9817a7d988d02ca [ 1026.071852] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg c3a4f538b9954c3f8e824c6c0e1f0640 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1026.078683] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c3a4f538b9954c3f8e824c6c0e1f0640 [ 1026.109035] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ea8ea0613f64760bc0b21113f79a0af [ 1026.114162] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2d920103-d762-48b2-97f5-9576e1df6a73 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "8053d2ae-ca61-4282-aa89-83f3a2e107bc" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.178s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1026.114488] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2d920103-d762-48b2-97f5-9576e1df6a73 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 26308da315b7464ab85616016f59d252 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1026.123780] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 26308da315b7464ab85616016f59d252 [ 1026.376498] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-895847f3-37a2-4fe9-9abc-f55a70cb17ea {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.384175] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b6a8218-5849-4a59-ad56-5fd5d4522df0 {{(pid=62740) request_handler
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.414784] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1456f655-d797-4522-af15-9961d2b86eb6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.422250] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df981532-819c-4130-88ca-3ed57beaf3d3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.435287] env[62740]: DEBUG nova.compute.provider_tree [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1026.435790] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 18527e2f16004c96b0dd6e3511616028 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1026.444665] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 18527e2f16004c96b0dd6e3511616028 [ 1026.445582] env[62740]: DEBUG nova.scheduler.client.report [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1026.447950] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg ba13a20bad184d0087e8067de4c48146 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1026.461841] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ba13a20bad184d0087e8067de4c48146 [ 1026.462601] env[62740]: DEBUG oslo_concurrency.lockutils [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.451s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1026.463083] env[62740]: DEBUG nova.compute.manager [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Start building networks asynchronously for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1026.464694] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 63ecdb65b55e4dc0affc6c0738867d02 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1026.504796] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63ecdb65b55e4dc0affc6c0738867d02 [ 1026.504978] env[62740]: DEBUG nova.compute.utils [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1026.505556] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg e2beda28e695418b9ba8616c9686ec9d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1026.506887] env[62740]: DEBUG nova.compute.manager [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1026.506887] env[62740]: DEBUG nova.network.neutron [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1026.515610] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2beda28e695418b9ba8616c9686ec9d [ 1026.516155] env[62740]: DEBUG nova.compute.manager [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1026.517743] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 59c471e5a6934248b177b451614b69fb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1026.548397] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 59c471e5a6934248b177b451614b69fb [ 1026.550978] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 7247c1be5fbb4088931d03b1f5828a33 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1026.581819] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7247c1be5fbb4088931d03b1f5828a33 [ 1026.582649] env[62740]: DEBUG nova.compute.manager [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Start spawning the instance on the hypervisor. {{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1026.596708] env[62740]: DEBUG nova.policy [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '96885eac4aea4c049695f190c31b8b0e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c5b3436283d24d41ae0e599a35d1850c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 1026.613458] env[62740]: DEBUG nova.virt.hardware [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1026.613725] env[62740]: DEBUG nova.virt.hardware [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1026.613857] env[62740]: 
DEBUG nova.virt.hardware [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1026.614056] env[62740]: DEBUG nova.virt.hardware [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1026.614211] env[62740]: DEBUG nova.virt.hardware [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1026.614362] env[62740]: DEBUG nova.virt.hardware [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1026.614579] env[62740]: DEBUG nova.virt.hardware [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1026.614742] env[62740]: DEBUG nova.virt.hardware [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1026.614911] env[62740]: DEBUG nova.virt.hardware [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1026.615278] env[62740]: DEBUG nova.virt.hardware [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1026.615500] env[62740]: DEBUG nova.virt.hardware [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1026.616393] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7976f8e7-f240-442e-bbb1-07d034dc679a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.624730] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6d1c97b-bf76-4bdb-bd95-27b95ad1a60f {{(pid=62740) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.961805] env[62740]: DEBUG nova.network.neutron [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Successfully created port: 08fe8b68-adea-43c7-b3c0-04100c2fae3e {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1027.349508] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-4fc6dbfb-a7f5-4377-bd42-860eece46bfc tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 42afe55942684f7db8a750076c89cc1d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1027.362598] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 42afe55942684f7db8a750076c89cc1d [ 1027.362820] env[62740]: DEBUG oslo_concurrency.lockutils [None req-4fc6dbfb-a7f5-4377-bd42-860eece46bfc tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Acquiring lock "5f57389d-853e-4439-872a-8345664578d0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1027.953138] env[62740]: DEBUG nova.network.neutron [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Successfully updated port: 08fe8b68-adea-43c7-b3c0-04100c2fae3e {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1027.953635] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg eea2dfaa761a4383800d7a5dfdbe426c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1027.965607] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eea2dfaa761a4383800d7a5dfdbe426c [ 1027.966794] env[62740]: DEBUG oslo_concurrency.lockutils [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquiring lock "refresh_cache-d8dac9af-0897-4fbf-8ee6-1fb3955d48c0" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1027.966794] env[62740]: DEBUG oslo_concurrency.lockutils [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquired lock "refresh_cache-d8dac9af-0897-4fbf-8ee6-1fb3955d48c0" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.967849] env[62740]: DEBUG nova.network.neutron [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1027.967849] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] 
Expecting reply to msg 126e54dc668b410084e7be2f2378169e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1027.978545] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 126e54dc668b410084e7be2f2378169e [ 1028.018143] env[62740]: DEBUG nova.compute.manager [req-5fac38fe-2b2b-4b15-8161-e6891eb08733 req-0cc4cffb-b554-4171-9afc-f73aebf6a5bf service nova] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Received event network-vif-plugged-08fe8b68-adea-43c7-b3c0-04100c2fae3e {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1028.018434] env[62740]: DEBUG oslo_concurrency.lockutils [req-5fac38fe-2b2b-4b15-8161-e6891eb08733 req-0cc4cffb-b554-4171-9afc-f73aebf6a5bf service nova] Acquiring lock "d8dac9af-0897-4fbf-8ee6-1fb3955d48c0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.018585] env[62740]: DEBUG oslo_concurrency.lockutils [req-5fac38fe-2b2b-4b15-8161-e6891eb08733 req-0cc4cffb-b554-4171-9afc-f73aebf6a5bf service nova] Lock "d8dac9af-0897-4fbf-8ee6-1fb3955d48c0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.018759] env[62740]: DEBUG oslo_concurrency.lockutils [req-5fac38fe-2b2b-4b15-8161-e6891eb08733 req-0cc4cffb-b554-4171-9afc-f73aebf6a5bf service nova] Lock "d8dac9af-0897-4fbf-8ee6-1fb3955d48c0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1028.019019] env[62740]: DEBUG nova.compute.manager [req-5fac38fe-2b2b-4b15-8161-e6891eb08733 req-0cc4cffb-b554-4171-9afc-f73aebf6a5bf service nova] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] No waiting events found dispatching network-vif-plugged-08fe8b68-adea-43c7-b3c0-04100c2fae3e {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1028.019202] env[62740]: WARNING nova.compute.manager [req-5fac38fe-2b2b-4b15-8161-e6891eb08733 req-0cc4cffb-b554-4171-9afc-f73aebf6a5bf service nova] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Received unexpected event network-vif-plugged-08fe8b68-adea-43c7-b3c0-04100c2fae3e for instance with vm_state building and task_state spawning. [ 1028.023615] env[62740]: DEBUG nova.network.neutron [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1028.238463] env[62740]: DEBUG nova.network.neutron [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Updating instance_info_cache with network_info: [{"id": "08fe8b68-adea-43c7-b3c0-04100c2fae3e", "address": "fa:16:3e:4c:c0:9c", "network": {"id": "2df153a5-47a3-46f8-96b7-aa7f4f657f55", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1427651514-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c5b3436283d24d41ae0e599a35d1850c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08fe8b68-ad", "ovs_interfaceid": "08fe8b68-adea-43c7-b3c0-04100c2fae3e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1028.238990] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg b0e494cea7cf44ee993c9a17096fb7d2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1028.251371] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b0e494cea7cf44ee993c9a17096fb7d2 [ 1028.251934] env[62740]: DEBUG oslo_concurrency.lockutils [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Releasing lock "refresh_cache-d8dac9af-0897-4fbf-8ee6-1fb3955d48c0" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1028.252234] env[62740]: DEBUG nova.compute.manager [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Instance network_info: |[{"id": "08fe8b68-adea-43c7-b3c0-04100c2fae3e", "address": "fa:16:3e:4c:c0:9c", "network": {"id": "2df153a5-47a3-46f8-96b7-aa7f4f657f55", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1427651514-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c5b3436283d24d41ae0e599a35d1850c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08fe8b68-ad", "ovs_interfaceid": "08fe8b68-adea-43c7-b3c0-04100c2fae3e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1028.252635] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4c:c0:9c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4a2b284a-a29c-478f-b763-c9b5821e20ec', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '08fe8b68-adea-43c7-b3c0-04100c2fae3e', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1028.260089] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Creating folder: Project (c5b3436283d24d41ae0e599a35d1850c). Parent ref: group-v156037. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1028.260586] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-81332720-8b07-4530-af0d-5d1f3b56682a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.271578] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Created folder: Project (c5b3436283d24d41ae0e599a35d1850c) in parent group-v156037. [ 1028.271764] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Creating folder: Instances. Parent ref: group-v156117. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1028.271984] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8138fcd7-5f9a-493b-a10d-11cfc2329ef1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.280562] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Created folder: Instances in parent group-v156117. [ 1028.280827] env[62740]: DEBUG oslo.service.loopingcall [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1028.281035] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1028.281256] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bd716ccf-cb74-4351-935a-9f0a261f2725 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.300308] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1028.300308] env[62740]: value = "task-640177" [ 1028.300308] env[62740]: _type = "Task" [ 1028.300308] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.308239] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640177, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.810974] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640177, 'name': CreateVM_Task, 'duration_secs': 0.29364} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.811170] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1028.811834] env[62740]: DEBUG oslo_concurrency.lockutils [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1028.812009] env[62740]: DEBUG oslo_concurrency.lockutils [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquired lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.812348] env[62740]: DEBUG oslo_concurrency.lockutils [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1028.812599] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cefdc69c-f86c-4b99-a6b0-dec56973af43 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.817433] env[62740]: DEBUG oslo_vmware.api [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Waiting for the task: (returnval){ [ 1028.817433] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52a1ddb4-270f-3097-46b1-7008a3f1c0c2" [ 1028.817433] env[62740]: _type = "Task" [ 1028.817433] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.824945] env[62740]: DEBUG oslo_vmware.api [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52a1ddb4-270f-3097-46b1-7008a3f1c0c2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.328120] env[62740]: DEBUG oslo_concurrency.lockutils [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Releasing lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1029.328417] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1029.328647] env[62740]: DEBUG oslo_concurrency.lockutils [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1030.047233] env[62740]: DEBUG nova.compute.manager [req-c0beb41a-8ef4-44b3-ab9c-59df34161b74 req-06d953ee-50ed-45c1-a3a9-4659d91d814b service nova] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Received event network-changed-08fe8b68-adea-43c7-b3c0-04100c2fae3e {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1030.047439] env[62740]: DEBUG nova.compute.manager [req-c0beb41a-8ef4-44b3-ab9c-59df34161b74 req-06d953ee-50ed-45c1-a3a9-4659d91d814b service nova] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Refreshing instance network info cache due to event network-changed-08fe8b68-adea-43c7-b3c0-04100c2fae3e. 
{{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1030.047655] env[62740]: DEBUG oslo_concurrency.lockutils [req-c0beb41a-8ef4-44b3-ab9c-59df34161b74 req-06d953ee-50ed-45c1-a3a9-4659d91d814b service nova] Acquiring lock "refresh_cache-d8dac9af-0897-4fbf-8ee6-1fb3955d48c0" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1030.047800] env[62740]: DEBUG oslo_concurrency.lockutils [req-c0beb41a-8ef4-44b3-ab9c-59df34161b74 req-06d953ee-50ed-45c1-a3a9-4659d91d814b service nova] Acquired lock "refresh_cache-d8dac9af-0897-4fbf-8ee6-1fb3955d48c0" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.048195] env[62740]: DEBUG nova.network.neutron [req-c0beb41a-8ef4-44b3-ab9c-59df34161b74 req-06d953ee-50ed-45c1-a3a9-4659d91d814b service nova] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Refreshing network info cache for port 08fe8b68-adea-43c7-b3c0-04100c2fae3e {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1030.048743] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-c0beb41a-8ef4-44b3-ab9c-59df34161b74 req-06d953ee-50ed-45c1-a3a9-4659d91d814b service nova] Expecting reply to msg 6c1ddb6517a9406eabc979acf464cd98 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1030.057103] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c1ddb6517a9406eabc979acf464cd98 [ 1030.357178] env[62740]: DEBUG nova.network.neutron [req-c0beb41a-8ef4-44b3-ab9c-59df34161b74 req-06d953ee-50ed-45c1-a3a9-4659d91d814b service nova] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Updated VIF entry in instance network info cache for port 08fe8b68-adea-43c7-b3c0-04100c2fae3e. 
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1030.357557] env[62740]: DEBUG nova.network.neutron [req-c0beb41a-8ef4-44b3-ab9c-59df34161b74 req-06d953ee-50ed-45c1-a3a9-4659d91d814b service nova] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Updating instance_info_cache with network_info: [{"id": "08fe8b68-adea-43c7-b3c0-04100c2fae3e", "address": "fa:16:3e:4c:c0:9c", "network": {"id": "2df153a5-47a3-46f8-96b7-aa7f4f657f55", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1427651514-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c5b3436283d24d41ae0e599a35d1850c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08fe8b68-ad", "ovs_interfaceid": "08fe8b68-adea-43c7-b3c0-04100c2fae3e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1030.358085] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-c0beb41a-8ef4-44b3-ab9c-59df34161b74 req-06d953ee-50ed-45c1-a3a9-4659d91d814b service nova] Expecting reply to msg adea0d17843843d78d84b2bd43959c44 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1030.366416] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg adea0d17843843d78d84b2bd43959c44 [ 1030.367053] env[62740]: DEBUG oslo_concurrency.lockutils [req-c0beb41a-8ef4-44b3-ab9c-59df34161b74 req-06d953ee-50ed-45c1-a3a9-4659d91d814b service nova] Releasing lock "refresh_cache-d8dac9af-0897-4fbf-8ee6-1fb3955d48c0" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1034.302897] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-521bbb61-a739-4bf8-997a-6c0abba7c32b tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg c9cfb0f37ad24ac8964d2130d6674db8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1034.318188] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c9cfb0f37ad24ac8964d2130d6674db8 [ 1034.318818] env[62740]: DEBUG oslo_concurrency.lockutils [None req-521bbb61-a739-4bf8-997a-6c0abba7c32b tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Acquiring lock "472cd209-4192-4473-b788-d1ea342653bf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1035.129112] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2ccbe059-93c0-460f-8d44-52053d9c16a5 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg ddcfc8acdf1e4024b56edd8064abd91b in queue 
reply_30cb6e3d754a4ebf9cedab7950709402 [ 1035.139033] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ddcfc8acdf1e4024b56edd8064abd91b [ 1035.139033] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2ccbe059-93c0-460f-8d44-52053d9c16a5 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquiring lock "d8dac9af-0897-4fbf-8ee6-1fb3955d48c0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1038.774859] env[62740]: WARNING oslo_vmware.rw_handles [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1038.774859] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1038.774859] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1038.774859] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1038.774859] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1038.774859] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 1038.774859] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1038.774859] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1038.774859] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1038.774859] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1038.774859] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1038.774859] env[62740]: ERROR oslo_vmware.rw_handles [ 1038.775740] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/07172579-1799-400d-9f98-bf63e00a4b86/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore1 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1038.777571] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1038.777708] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Copying Virtual Disk [datastore1] vmware_temp/07172579-1799-400d-9f98-bf63e00a4b86/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore1] vmware_temp/07172579-1799-400d-9f98-bf63e00a4b86/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1038.778142] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ca4d2863-efee-41e9-b9f6-fb2c6ed71d34 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.786674] env[62740]: DEBUG oslo_vmware.api [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Waiting for the task: (returnval){ [ 1038.786674] env[62740]: value = "task-640178" [ 1038.786674] env[62740]: _type = "Task" [ 1038.786674] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.795413] env[62740]: DEBUG oslo_vmware.api [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Task: {'id': task-640178, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.296988] env[62740]: DEBUG oslo_vmware.exceptions [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Fault InvalidArgument not matched. {{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1039.298483] env[62740]: DEBUG oslo_concurrency.lockutils [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Releasing lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1039.298483] env[62740]: ERROR nova.compute.manager [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1039.298483] env[62740]: Faults: ['InvalidArgument'] [ 1039.298483] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Traceback (most recent call last): [ 1039.298483] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1039.298483] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] yield resources [ 1039.298483] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1039.298483] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] self.driver.spawn(context, instance, image_meta, [ 1039.298483] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1039.298483] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1039.298898] env[62740]: ERROR nova.compute.manager [instance: 
68aa9321-22ce-45a0-8323-fa8564dca46b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1039.298898] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] self._fetch_image_if_missing(context, vi) [ 1039.298898] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1039.298898] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] image_cache(vi, tmp_image_ds_loc) [ 1039.298898] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1039.298898] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] vm_util.copy_virtual_disk( [ 1039.298898] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1039.298898] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] session._wait_for_task(vmdk_copy_task) [ 1039.298898] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1039.298898] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] return self.wait_for_task(task_ref) [ 1039.298898] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1039.298898] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] return evt.wait() [ 1039.298898] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1039.299224] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] result = hub.switch() [ 1039.299224] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1039.299224] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] return self.greenlet.switch() [ 1039.299224] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1039.299224] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] self.f(*self.args, **self.kw) [ 1039.299224] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1039.299224] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] raise exceptions.translate_fault(task_info.error) [ 1039.299224] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1039.299224] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Faults: 
['InvalidArgument'] [ 1039.299224] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] [ 1039.299224] env[62740]: INFO nova.compute.manager [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Terminating instance [ 1039.300242] env[62740]: DEBUG oslo_concurrency.lockutils [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Acquired lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.301309] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1039.302083] env[62740]: DEBUG nova.compute.manager [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1039.302425] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1039.302516] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-85ebcb3b-edaa-4bb4-b643-3cb6a48d9194 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.305559] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71c464d4-7b64-4000-aa87-827063eb0d30 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.315260] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1039.318245] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4f59ab3f-4e16-413d-b1c9-dc837cbcc014 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.319812] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1039.319985] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-09317351-a109-4643-a772-41846a599304 
tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1039.320900] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c858727-edc2-40dc-a4be-eddd4e76ce97 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.328123] env[62740]: DEBUG oslo_vmware.api [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Waiting for the task: (returnval){ [ 1039.328123] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52722b9a-ba0b-3f8d-76c2-00c2a25fdfb3" [ 1039.328123] env[62740]: _type = "Task" [ 1039.328123] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.339370] env[62740]: DEBUG oslo_vmware.api [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52722b9a-ba0b-3f8d-76c2-00c2a25fdfb3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.380691] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1039.380914] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Deleting contents of the VM from datastore datastore1 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1039.381111] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Deleting the datastore file [datastore1] 68aa9321-22ce-45a0-8323-fa8564dca46b {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1039.381609] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-192b9640-08e6-449b-9cc7-80ddcaaee969 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.389792] env[62740]: DEBUG oslo_vmware.api [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Waiting for the task: (returnval){ [ 1039.389792] env[62740]: value = "task-640180" [ 1039.389792] env[62740]: _type = "Task" [ 1039.389792] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.399996] env[62740]: DEBUG oslo_vmware.api [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Task: {'id': task-640180, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.837618] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1039.837923] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Creating directory with path [datastore1] vmware_temp/4e483eab-8e81-4e14-8547-652f9c626cbb/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1039.838167] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a5f1a997-71fd-461f-85d2-0fe11faac1dd {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.852109] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Created directory with path [datastore1] vmware_temp/4e483eab-8e81-4e14-8547-652f9c626cbb/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1039.852337] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Fetch image to [datastore1] vmware_temp/4e483eab-8e81-4e14-8547-652f9c626cbb/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1039.852515] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore1] vmware_temp/4e483eab-8e81-4e14-8547-652f9c626cbb/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore1 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1039.853384] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82b5ff51-5a91-448e-8066-e1abf01ba08c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.860382] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a59be1e2-5011-4caa-aaaf-7792c4265a52 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.870771] 
env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9100eaae-d614-48da-a3b2-51b5feef0ee8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.905073] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31a51dfc-0e25-4bdd-8700-ef25632f9426 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.913941] env[62740]: DEBUG oslo_vmware.api [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Task: {'id': task-640180, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080307} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.916863] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-033508ed-63e3-4c3e-815f-d89134dc5abd {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.916863] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1039.916863] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Deleted contents of the VM from datastore datastore1 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1039.916863] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1039.916863] env[62740]: INFO nova.compute.manager [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Took 0.61 seconds to destroy the instance on the hypervisor. 
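What the surrounding entries show, in miniature, is oslo.vmware's task pattern: each privileged vCenter operation (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task) returns a task handle that the driver polls until it reaches a terminal state, producing the "Waiting for the task ... to complete", "progress is 0%.", and "completed successfully" lines above. The sketch below is illustrative only: the session object, its get_task_info() helper, and the 0.5 s interval are assumptions made for this example, not the real oslo.vmware API surface.

    import time

    def wait_for_task(session, task_ref, interval=0.5):
        # Illustrative polling loop in the spirit of
        # oslo_vmware.api wait_for_task/_poll_task; get_task_info()
        # is a hypothetical helper, not a real oslo.vmware call.
        while True:
            info = session.get_task_info(task_ref)
            if info.state == "success":
                return info.result              # "completed successfully"
            if info.state == "error":
                # Fault path: the error recorded on the task (e.g.
                # "A specified parameter was not correct: fileType",
                # Faults: ['InvalidArgument']) is raised to the caller.
                raise RuntimeError(info.error)
            time.sleep(interval)                # poll again on a timer

In the real code the loop runs inside a looping call on an eventlet hub (visible in the tracebacks below as loopingcall.py / hub.switch()), but the control flow is the same.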
[ 1039.918481] env[62740]: DEBUG nova.compute.claims [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1039.918660] env[62740]: DEBUG oslo_concurrency.lockutils [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1039.918877] env[62740]: DEBUG oslo_concurrency.lockutils [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1039.920948] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Expecting reply to msg 44d56438123342aea5b745796eeb51de in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1039.942054] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore1 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1039.964497] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 44d56438123342aea5b745796eeb51de [ 1039.999937] env[62740]: DEBUG oslo_vmware.rw_handles [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4e483eab-8e81-4e14-8547-652f9c626cbb/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1040.067471] env[62740]: DEBUG oslo_vmware.rw_handles [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Completed reading data from the image iterator. {{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1040.067471] env[62740]: DEBUG oslo_vmware.rw_handles [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4e483eab-8e81-4e14-8547-652f9c626cbb/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1040.335611] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f549aaf-59b7-44c2-8273-2ff2e999c5f1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.343873] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27124371-fdf1-4f36-a419-842c1b454eb9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.375665] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77c3279f-d379-4773-847d-ab2eab8b5b7a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.383315] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df500332-41e4-443f-bbe4-7ac0ffe701be {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.397118] env[62740]: DEBUG nova.compute.provider_tree [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1040.397630] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Expecting reply to msg aba1bccd1809416c93437f9ad0a22027 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1040.405773] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aba1bccd1809416c93437f9ad0a22027 [ 1040.406741] env[62740]: DEBUG nova.scheduler.client.report [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1040.409212] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Expecting reply to msg 9b669a9e6c354d6faee66042e3d78db0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1040.424191] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9b669a9e6c354d6faee66042e3d78db0 [ 1040.424963] env[62740]: DEBUG oslo_concurrency.lockutils [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.506s 
{{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1040.425898] env[62740]: ERROR nova.compute.manager [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1040.425898] env[62740]: Faults: ['InvalidArgument']
[ 1040.425898] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Traceback (most recent call last):
[ 1040.425898] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1040.425898] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] self.driver.spawn(context, instance, image_meta,
[ 1040.425898] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1040.425898] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1040.425898] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1040.425898] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] self._fetch_image_if_missing(context, vi)
[ 1040.425898] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1040.425898] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] image_cache(vi, tmp_image_ds_loc)
[ 1040.425898] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1040.426296] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] vm_util.copy_virtual_disk(
[ 1040.426296] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1040.426296] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] session._wait_for_task(vmdk_copy_task)
[ 1040.426296] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1040.426296] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] return self.wait_for_task(task_ref)
[ 1040.426296] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1040.426296] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] return evt.wait()
[ 1040.426296] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1040.426296] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] result = hub.switch()
[ 1040.426296] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1040.426296] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] return self.greenlet.switch()
[ 1040.426296] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1040.426296] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] self.f(*self.args, **self.kw)
[ 1040.426670] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1040.426670] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] raise exceptions.translate_fault(task_info.error)
[ 1040.426670] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1040.426670] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Faults: ['InvalidArgument']
[ 1040.426670] env[62740]: ERROR nova.compute.manager [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b]
[ 1040.426670] env[62740]: DEBUG nova.compute.utils [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1040.427955] env[62740]: DEBUG nova.compute.manager [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Build of instance 68aa9321-22ce-45a0-8323-fa8564dca46b was re-scheduled: A specified parameter was not correct: fileType
[ 1040.427955] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 1040.428382] env[62740]: DEBUG nova.compute.manager [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 1040.428570] env[62740]: DEBUG nova.compute.manager [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 1040.428747] env[62740]: DEBUG nova.compute.manager [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 1040.428939] env[62740]: DEBUG nova.network.neutron [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1041.232632] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Expecting reply to msg d4124c3cf27a4f17bb3640d5a38ab436 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1041.247900] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d4124c3cf27a4f17bb3640d5a38ab436
[ 1041.249308] env[62740]: DEBUG nova.network.neutron [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1041.250071] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Expecting reply to msg 16faeaed49094935a9f4c23bc374e15e in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1041.266963] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 16faeaed49094935a9f4c23bc374e15e
[ 1041.267612] env[62740]: INFO nova.compute.manager [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Took 0.84 seconds to deallocate network for instance.
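The traceback above shows the path a vCenter task failure takes: _poll_task inspects the task state, exceptions.translate_fault(task_info.error) turns the VIM fault ('InvalidArgument' on fileType) into a VimFaultException, and the raise unwinds through wait_for_task() and spawn() into _build_and_run_instance, which then re-schedules the build. A minimal sketch of that polling pattern, assuming a hypothetical poll_task_info() callable (illustrative only, not the oslo.vmware source):

    import time

    class VimFaultException(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    def wait_for_task(poll_task_info, interval=0.5):
        # poll_task_info() is a hypothetical callable returning an object with
        # .state in {'queued', 'running', 'success', 'error'} and .error_message.
        while True:
            info = poll_task_info()
            if info.state == 'success':
                return info
            if info.state == 'error':
                # The equivalent of raise exceptions.translate_fault(task_info.error);
                # the fault name becomes the Faults: [...] entry seen in the log.
                raise VimFaultException(['InvalidArgument'], info.error_message)
            time.sleep(interval)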
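The "Inventory has not changed" records report, for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0, each resource class's total, reserved amount and allocation_ratio. Placement's usable capacity per class is (total - reserved) * allocation_ratio, so the logged data works out to 192 schedulable VCPUs, 196078 MB of RAM and 400 GB of disk. A quick worked check (the formula is standard Placement semantics; the dict is copied from the log, with the min_unit/max_unit/step_size keys omitted):

    # Capacity check against the inventory logged above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: {capacity:g}")   # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400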
[ 1041.269605] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Expecting reply to msg 48b26cb51273461a9d38e7009056e917 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1041.320084] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 48b26cb51273461a9d38e7009056e917 [ 1041.323185] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Expecting reply to msg 9fe64e727f7c4dba9321c2b9f2178a4b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1041.371746] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9fe64e727f7c4dba9321c2b9f2178a4b [ 1041.406238] env[62740]: INFO nova.scheduler.client.report [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Deleted allocations for instance 68aa9321-22ce-45a0-8323-fa8564dca46b [ 1041.418536] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Expecting reply to msg 3d78c978803744e48730a3fa50732c19 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1041.440272] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3d78c978803744e48730a3fa50732c19 [ 1041.442345] env[62740]: DEBUG oslo_concurrency.lockutils [None req-76b6a905-7cfc-4df6-ac39-a9c6d5293927 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Lock "68aa9321-22ce-45a0-8323-fa8564dca46b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 404.361s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.444714] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8502e303-1ca6-4cc8-9c89-c501446c6194 tempest-MultipleCreateTestJSON-375945429 tempest-MultipleCreateTestJSON-375945429-project-member] Expecting reply to msg f6fc4e16da0b425897b488fa64a83f20 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1041.445722] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a858de76-9c35-4693-8936-6e5c167cdfb3 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Lock "68aa9321-22ce-45a0-8323-fa8564dca46b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 205.741s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.446114] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a858de76-9c35-4693-8936-6e5c167cdfb3 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Acquiring lock "68aa9321-22ce-45a0-8323-fa8564dca46b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1041.446437] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a858de76-9c35-4693-8936-6e5c167cdfb3 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Lock 
"68aa9321-22ce-45a0-8323-fa8564dca46b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.448645] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a858de76-9c35-4693-8936-6e5c167cdfb3 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Lock "68aa9321-22ce-45a0-8323-fa8564dca46b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.002s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.451592] env[62740]: INFO nova.compute.manager [None req-a858de76-9c35-4693-8936-6e5c167cdfb3 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Terminating instance [ 1041.454882] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a858de76-9c35-4693-8936-6e5c167cdfb3 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Acquiring lock "refresh_cache-68aa9321-22ce-45a0-8323-fa8564dca46b" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1041.455135] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a858de76-9c35-4693-8936-6e5c167cdfb3 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Acquired lock "refresh_cache-68aa9321-22ce-45a0-8323-fa8564dca46b" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.455404] env[62740]: DEBUG nova.network.neutron [None req-a858de76-9c35-4693-8936-6e5c167cdfb3 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1041.455880] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a858de76-9c35-4693-8936-6e5c167cdfb3 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Expecting reply to msg e8ddb4bac2ad4e03b9a0c9fee277294a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1041.464704] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e8ddb4bac2ad4e03b9a0c9fee277294a [ 1041.468691] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f6fc4e16da0b425897b488fa64a83f20 [ 1041.469234] env[62740]: DEBUG nova.compute.manager [None req-8502e303-1ca6-4cc8-9c89-c501446c6194 tempest-MultipleCreateTestJSON-375945429 tempest-MultipleCreateTestJSON-375945429-project-member] [instance: 2162ea32-6407-4286-9340-b62a9ec0988e] Starting instance... 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1041.473018] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8502e303-1ca6-4cc8-9c89-c501446c6194 tempest-MultipleCreateTestJSON-375945429 tempest-MultipleCreateTestJSON-375945429-project-member] Expecting reply to msg f7a00f29f3644436841ff276e5dcbf03 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1041.492841] env[62740]: DEBUG nova.network.neutron [None req-a858de76-9c35-4693-8936-6e5c167cdfb3 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1041.513031] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f7a00f29f3644436841ff276e5dcbf03 [ 1041.513683] env[62740]: DEBUG nova.compute.manager [None req-8502e303-1ca6-4cc8-9c89-c501446c6194 tempest-MultipleCreateTestJSON-375945429 tempest-MultipleCreateTestJSON-375945429-project-member] [instance: 2162ea32-6407-4286-9340-b62a9ec0988e] Instance disappeared before build. {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1041.514069] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8502e303-1ca6-4cc8-9c89-c501446c6194 tempest-MultipleCreateTestJSON-375945429 tempest-MultipleCreateTestJSON-375945429-project-member] Expecting reply to msg 5d52750ddcc5438795079eea3f4fb206 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1041.532575] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5d52750ddcc5438795079eea3f4fb206 [ 1041.543440] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8502e303-1ca6-4cc8-9c89-c501446c6194 tempest-MultipleCreateTestJSON-375945429 tempest-MultipleCreateTestJSON-375945429-project-member] Lock "2162ea32-6407-4286-9340-b62a9ec0988e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.482s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.543440] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8502e303-1ca6-4cc8-9c89-c501446c6194 tempest-MultipleCreateTestJSON-375945429 tempest-MultipleCreateTestJSON-375945429-project-member] Expecting reply to msg 9dcf0e5fbb834f05a1907fb58e154493 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1041.562024] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9dcf0e5fbb834f05a1907fb58e154493 [ 1041.562024] env[62740]: DEBUG nova.compute.manager [None req-8502e303-1ca6-4cc8-9c89-c501446c6194 tempest-MultipleCreateTestJSON-375945429 tempest-MultipleCreateTestJSON-375945429-project-member] [instance: d2669ffb-41b1-474d-bb7a-fabae11e69d0] Starting instance... 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1041.562024] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8502e303-1ca6-4cc8-9c89-c501446c6194 tempest-MultipleCreateTestJSON-375945429 tempest-MultipleCreateTestJSON-375945429-project-member] Expecting reply to msg 51139ca395a04814a8cba95e1e98ff2c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1041.595067] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 51139ca395a04814a8cba95e1e98ff2c [ 1041.596349] env[62740]: DEBUG nova.compute.manager [None req-8502e303-1ca6-4cc8-9c89-c501446c6194 tempest-MultipleCreateTestJSON-375945429 tempest-MultipleCreateTestJSON-375945429-project-member] [instance: d2669ffb-41b1-474d-bb7a-fabae11e69d0] Instance disappeared before build. {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1041.596920] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8502e303-1ca6-4cc8-9c89-c501446c6194 tempest-MultipleCreateTestJSON-375945429 tempest-MultipleCreateTestJSON-375945429-project-member] Expecting reply to msg 11b2e37e75174dd7906be267c130ce90 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1041.615132] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 11b2e37e75174dd7906be267c130ce90 [ 1041.627533] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Acquiring lock "43e4ddf4-230e-49f7-975f-ba99a6da9398" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1041.627796] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Lock "43e4ddf4-230e-49f7-975f-ba99a6da9398" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.631302] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8502e303-1ca6-4cc8-9c89-c501446c6194 tempest-MultipleCreateTestJSON-375945429 tempest-MultipleCreateTestJSON-375945429-project-member] Lock "d2669ffb-41b1-474d-bb7a-fabae11e69d0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.524s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.631800] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-21bca7be-5c08-4f7e-b029-329cf8a08e11 tempest-ListServersNegativeTestJSON-506053537 tempest-ListServersNegativeTestJSON-506053537-project-member] Expecting reply to msg 89b3b65807884a2e89e54817c08bfded in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1041.642784] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 89b3b65807884a2e89e54817c08bfded [ 1041.643278] env[62740]: DEBUG nova.compute.manager [None req-21bca7be-5c08-4f7e-b029-329cf8a08e11 tempest-ListServersNegativeTestJSON-506053537 tempest-ListServersNegativeTestJSON-506053537-project-member] [instance: f7574228-f7fc-4ab0-9a38-7671046d46a9] Starting instance... 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1041.644901] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-21bca7be-5c08-4f7e-b029-329cf8a08e11 tempest-ListServersNegativeTestJSON-506053537 tempest-ListServersNegativeTestJSON-506053537-project-member] Expecting reply to msg 624f75840084474191f891bdfaf3243f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1041.666184] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 624f75840084474191f891bdfaf3243f [ 1041.666745] env[62740]: DEBUG nova.compute.manager [None req-21bca7be-5c08-4f7e-b029-329cf8a08e11 tempest-ListServersNegativeTestJSON-506053537 tempest-ListServersNegativeTestJSON-506053537-project-member] [instance: f7574228-f7fc-4ab0-9a38-7671046d46a9] Instance disappeared before build. {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1041.667102] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-21bca7be-5c08-4f7e-b029-329cf8a08e11 tempest-ListServersNegativeTestJSON-506053537 tempest-ListServersNegativeTestJSON-506053537-project-member] Expecting reply to msg fc5ae31ece1746409f10d50f1d8134c8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1041.681034] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fc5ae31ece1746409f10d50f1d8134c8 [ 1041.696325] env[62740]: DEBUG oslo_concurrency.lockutils [None req-21bca7be-5c08-4f7e-b029-329cf8a08e11 tempest-ListServersNegativeTestJSON-506053537 tempest-ListServersNegativeTestJSON-506053537-project-member] Lock "f7574228-f7fc-4ab0-9a38-7671046d46a9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 198.914s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.697164] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-21bca7be-5c08-4f7e-b029-329cf8a08e11 tempest-ListServersNegativeTestJSON-506053537 tempest-ListServersNegativeTestJSON-506053537-project-member] Expecting reply to msg 9ca1e45e6d714332ac46beb1803d988d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1041.711849] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ca1e45e6d714332ac46beb1803d988d [ 1041.712396] env[62740]: DEBUG nova.compute.manager [None req-21bca7be-5c08-4f7e-b029-329cf8a08e11 tempest-ListServersNegativeTestJSON-506053537 tempest-ListServersNegativeTestJSON-506053537-project-member] [instance: b1b86050-2bb1-443b-967b-12531d71ba04] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1041.714091] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-21bca7be-5c08-4f7e-b029-329cf8a08e11 tempest-ListServersNegativeTestJSON-506053537 tempest-ListServersNegativeTestJSON-506053537-project-member] Expecting reply to msg 62d8ee1956a543078309d2e2248d32a7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1041.745891] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 62d8ee1956a543078309d2e2248d32a7 [ 1041.746608] env[62740]: DEBUG nova.compute.manager [None req-21bca7be-5c08-4f7e-b029-329cf8a08e11 tempest-ListServersNegativeTestJSON-506053537 tempest-ListServersNegativeTestJSON-506053537-project-member] [instance: b1b86050-2bb1-443b-967b-12531d71ba04] Instance disappeared before build. 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1041.746953] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-21bca7be-5c08-4f7e-b029-329cf8a08e11 tempest-ListServersNegativeTestJSON-506053537 tempest-ListServersNegativeTestJSON-506053537-project-member] Expecting reply to msg 6ca9d5208932473ba1190b13fe9fc2b1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1041.762024] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6ca9d5208932473ba1190b13fe9fc2b1 [ 1041.772203] env[62740]: DEBUG nova.network.neutron [None req-a858de76-9c35-4693-8936-6e5c167cdfb3 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.772790] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a858de76-9c35-4693-8936-6e5c167cdfb3 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Expecting reply to msg 040c0b6314cc43a6962f8b94d99b18e1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1041.775761] env[62740]: DEBUG oslo_concurrency.lockutils [None req-21bca7be-5c08-4f7e-b029-329cf8a08e11 tempest-ListServersNegativeTestJSON-506053537 tempest-ListServersNegativeTestJSON-506053537-project-member] Lock "b1b86050-2bb1-443b-967b-12531d71ba04" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 198.965s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.775960] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-21bca7be-5c08-4f7e-b029-329cf8a08e11 tempest-ListServersNegativeTestJSON-506053537 tempest-ListServersNegativeTestJSON-506053537-project-member] Expecting reply to msg 54fecb27746f4aca9058b332b4168abb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1041.782932] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 040c0b6314cc43a6962f8b94d99b18e1 [ 1041.783492] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a858de76-9c35-4693-8936-6e5c167cdfb3 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Releasing lock "refresh_cache-68aa9321-22ce-45a0-8323-fa8564dca46b" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1041.783879] env[62740]: DEBUG nova.compute.manager [None req-a858de76-9c35-4693-8936-6e5c167cdfb3 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Start destroying the instance on the hypervisor. 
{{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1041.784190] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-a858de76-9c35-4693-8936-6e5c167cdfb3 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1041.784623] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-86cb8b5a-8574-4d3d-82b8-5544c9c2e640 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.787265] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 54fecb27746f4aca9058b332b4168abb [ 1041.787685] env[62740]: DEBUG nova.compute.manager [None req-21bca7be-5c08-4f7e-b029-329cf8a08e11 tempest-ListServersNegativeTestJSON-506053537 tempest-ListServersNegativeTestJSON-506053537-project-member] [instance: 285dcc26-a4e9-40bc-82dd-37931f46e7fe] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1041.789339] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-21bca7be-5c08-4f7e-b029-329cf8a08e11 tempest-ListServersNegativeTestJSON-506053537 tempest-ListServersNegativeTestJSON-506053537-project-member] Expecting reply to msg 498cd9c9bf184b1ca8ab29689125711b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1041.797998] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f9643d2-d753-497c-86f1-d1cfb1936ca3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.813953] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 498cd9c9bf184b1ca8ab29689125711b [ 1041.814754] env[62740]: DEBUG nova.compute.manager [None req-21bca7be-5c08-4f7e-b029-329cf8a08e11 tempest-ListServersNegativeTestJSON-506053537 tempest-ListServersNegativeTestJSON-506053537-project-member] [instance: 285dcc26-a4e9-40bc-82dd-37931f46e7fe] Instance disappeared before build. {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1041.817026] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-21bca7be-5c08-4f7e-b029-329cf8a08e11 tempest-ListServersNegativeTestJSON-506053537 tempest-ListServersNegativeTestJSON-506053537-project-member] Expecting reply to msg 5760207f9d47440db33ffa09b74c6290 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1041.827388] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5760207f9d47440db33ffa09b74c6290 [ 1041.827953] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-a858de76-9c35-4693-8936-6e5c167cdfb3 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 68aa9321-22ce-45a0-8323-fa8564dca46b could not be found. 
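The WARNING above ("Instance does not exist on backend: ... could not be found.") and the "Instance destroyed" record just below show the vmwareapi driver's tolerant destroy: if the vCenter lookup raises InstanceNotFound, the driver treats the hypervisor-side teardown as already done and the manager proceeds to network deallocation anyway. A rough sketch of that shape, with hypothetical lookup/delete callables (not the vmops.py code):

    class InstanceNotFound(Exception):
        pass

    def destroy(lookup_vm_ref, delete_vm, log):
        # lookup_vm_ref stands in for the SearchIndex.FindAllByUuid call seen
        # above; delete_vm for the actual VM teardown.
        try:
            delete_vm(lookup_vm_ref())
        except InstanceNotFound as exc:
            # Already gone on the backend: warn and continue with the rest of
            # the cleanup (network, allocations), as the log shows.
            log.warning("Instance does not exist on backend: %s", exc)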
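The repeated "Instance disappeared before build." entries in this stretch (2162ea32, d2669ffb, f7574228, b1b86050, ...) are the other common short-circuit: after waiting a long time on the per-instance build lock, _do_build_and_run_instance re-reads the instance and returns early if it was deleted in the meantime, so no claim or spawn happens. Schematically (hypothetical helper names, not the manager.py code):

    def locked_do_build_and_run_instance(instance_lock, refresh_instance, build, log):
        with instance_lock:                 # may have waited minutes, per the log
            instance = refresh_instance()   # re-read state from the database
            if instance is None or instance.get('deleted'):
                log.debug("Instance disappeared before build.")
                return                      # release the lock; nothing to claim or spawn
            build(instance)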
[ 1041.828126] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-a858de76-9c35-4693-8936-6e5c167cdfb3 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1041.828346] env[62740]: INFO nova.compute.manager [None req-a858de76-9c35-4693-8936-6e5c167cdfb3 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1041.828594] env[62740]: DEBUG oslo.service.loopingcall [None req-a858de76-9c35-4693-8936-6e5c167cdfb3 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1041.830666] env[62740]: DEBUG nova.compute.manager [-] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1041.830744] env[62740]: DEBUG nova.network.neutron [-] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1041.843365] env[62740]: DEBUG oslo_concurrency.lockutils [None req-21bca7be-5c08-4f7e-b029-329cf8a08e11 tempest-ListServersNegativeTestJSON-506053537 tempest-ListServersNegativeTestJSON-506053537-project-member] Lock "285dcc26-a4e9-40bc-82dd-37931f46e7fe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 199.008s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.843999] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-44c3be26-0c85-4fd2-bfee-78d03eaf4ded tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Expecting reply to msg 15f703e28cc14cc88f45dd986f82ac54 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1041.853400] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 15f703e28cc14cc88f45dd986f82ac54 [ 1041.856038] env[62740]: DEBUG nova.compute.manager [None req-44c3be26-0c85-4fd2-bfee-78d03eaf4ded tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: b0995d6c-a700-47a3-a39d-6a6fe1462042] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1041.856038] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-44c3be26-0c85-4fd2-bfee-78d03eaf4ded tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Expecting reply to msg b3f469457fbf4c30851758d8b16d21ca in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1041.857099] env[62740]: DEBUG nova.network.neutron [-] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1041.858386] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg a14f89cc854b4cf296275c91cbee8a59 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1041.864435] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a14f89cc854b4cf296275c91cbee8a59 [ 1041.864831] env[62740]: DEBUG nova.network.neutron [-] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.865203] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 1a625e7c861041b9b337311960b68bd8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1041.878641] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1a625e7c861041b9b337311960b68bd8 [ 1041.879185] env[62740]: INFO nova.compute.manager [-] [instance: 68aa9321-22ce-45a0-8323-fa8564dca46b] Took 0.05 seconds to deallocate network for instance. [ 1041.883435] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a858de76-9c35-4693-8936-6e5c167cdfb3 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Expecting reply to msg b69e1dfee6b942099ce5c547db53c39e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1041.889264] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b3f469457fbf4c30851758d8b16d21ca [ 1041.889264] env[62740]: DEBUG nova.compute.manager [None req-44c3be26-0c85-4fd2-bfee-78d03eaf4ded tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: b0995d6c-a700-47a3-a39d-6a6fe1462042] Instance disappeared before build. 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1041.889812] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-44c3be26-0c85-4fd2-bfee-78d03eaf4ded tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Expecting reply to msg d4aa7fad5591494db37786c72a8169be in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1041.901518] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d4aa7fad5591494db37786c72a8169be [ 1041.917792] env[62740]: DEBUG oslo_concurrency.lockutils [None req-44c3be26-0c85-4fd2-bfee-78d03eaf4ded tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Lock "b0995d6c-a700-47a3-a39d-6a6fe1462042" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 198.826s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.919725] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Expecting reply to msg 926a5640fee14bd286beb73dfe315f3a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1041.927479] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b69e1dfee6b942099ce5c547db53c39e [ 1041.935340] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 926a5640fee14bd286beb73dfe315f3a [ 1041.935827] env[62740]: DEBUG nova.compute.manager [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Starting instance... 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1041.937671] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Expecting reply to msg b1b7789cc7f443a4944a132c4a4bf461 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1041.949501] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a858de76-9c35-4693-8936-6e5c167cdfb3 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Expecting reply to msg 7229a5be5c9445ac8ca9b3d093f727a8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1041.983191] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b1b7789cc7f443a4944a132c4a4bf461 [ 1041.991958] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7229a5be5c9445ac8ca9b3d093f727a8 [ 1041.994943] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a858de76-9c35-4693-8936-6e5c167cdfb3 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Lock "68aa9321-22ce-45a0-8323-fa8564dca46b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.549s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.995698] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a858de76-9c35-4693-8936-6e5c167cdfb3 tempest-ServersTestManualDisk-290868403 tempest-ServersTestManualDisk-290868403-project-member] Expecting reply to msg 6b2b46e1f54448b4a076c42fc6a60949 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1042.008563] env[62740]: DEBUG oslo_concurrency.lockutils [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.008563] env[62740]: DEBUG oslo_concurrency.lockutils [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1042.009168] env[62740]: INFO nova.compute.claims [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1042.010890] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Expecting reply to msg 53c0271b57094d32991a23c7c4049f1f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1042.017053] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6b2b46e1f54448b4a076c42fc6a60949 [ 1042.052244] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 53c0271b57094d32991a23c7c4049f1f [ 1042.054176] env[62740]: INFO 
oslo_messaging._drivers.amqpdriver [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Expecting reply to msg 6dac75f7b6e7469da84837b9075bbd5e in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1042.066296] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6dac75f7b6e7469da84837b9075bbd5e
[ 1042.317140] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c5dfd96-6209-4070-ba02-ad64d272c2e3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1042.325078] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3224cf08-b5d3-4610-a942-e7c1000e97b2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1042.357516] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51430d78-cd28-4915-965e-203ad39f6552 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1042.364951] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17536a90-bdd0-4ead-ac22-c8550ca62e56 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1042.382204] env[62740]: DEBUG nova.compute.provider_tree [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1042.382731] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Expecting reply to msg 35ca9874a5b64d39a91f50ea49438170 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1042.395546] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 35ca9874a5b64d39a91f50ea49438170
[ 1042.396593] env[62740]: DEBUG nova.scheduler.client.report [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1042.398951] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Expecting reply to msg a114ea72a0b543f7b56783f0d89ae183 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1042.411377] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a114ea72a0b543f7b56783f0d89ae183
[ 1042.412234] env[62740]: DEBUG oslo_concurrency.lockutils [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.404s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1042.412407] env[62740]: DEBUG nova.compute.manager [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Start building networks asynchronously for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}}
[ 1042.414235] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Expecting reply to msg eb87cacac5b44d0baaecd0d74c3250f5 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1042.456499] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eb87cacac5b44d0baaecd0d74c3250f5
[ 1042.457946] env[62740]: DEBUG nova.compute.utils [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1042.458915] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Expecting reply to msg 4fce3ad26bb644b7b2c3410a028176e1 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1042.460151] env[62740]: DEBUG nova.compute.manager [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}}
[ 1042.461187] env[62740]: DEBUG nova.network.neutron [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1042.473996] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4fce3ad26bb644b7b2c3410a028176e1
[ 1042.474817] env[62740]: DEBUG nova.compute.manager [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Start building block device mappings for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}}
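The lockutils records bracket the resource claim: "compute_resources" is acquired for ResourceTracker.instance_claim and the release entry reports the hold time (0.404s here, 0.506s for the earlier abort_instance_claim). The waited/held figures imply a wrapper that timestamps acquisition and release around the locked callable; a small sketch of that idea using a plain threading.Lock (illustrative, not oslo.concurrency itself):

    import threading
    import time

    _locks = {}

    def synchronized(name, fn, *args, **kwargs):
        lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        with lock:
            waited = time.monotonic() - t0
            t1 = time.monotonic()
            try:
                return fn(*args, **kwargs)
            finally:
                held = time.monotonic() - t1
                # Mirrors the 'Lock "..." "released" by "..." :: held N.NNNs' lines.
                print(f'Lock "{name}" "released" by "{fn.__name__}" :: '
                      f'waited {waited:.3f}s :: held {held:.3f}s')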
[ 1042.476571] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Expecting reply to msg 49e5ffaf8d9948288af0c48ae0501242 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1042.511687] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 49e5ffaf8d9948288af0c48ae0501242
[ 1042.514756] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Expecting reply to msg f865c445d6644c108f981d43f912a481 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1042.545903] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f865c445d6644c108f981d43f912a481
[ 1042.546729] env[62740]: DEBUG nova.compute.manager [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Start spawning the instance on the hypervisor. {{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}}
[ 1042.574281] env[62740]: DEBUG nova.virt.hardware [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1042.574521] env[62740]: DEBUG nova.virt.hardware [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1042.574761] env[62740]: DEBUG nova.virt.hardware [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1042.574857] env[62740]: DEBUG nova.virt.hardware [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1042.575017] env[62740]: DEBUG nova.virt.hardware [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1042.575169] env[62740]: DEBUG nova.virt.hardware [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1042.575392] env[62740]: DEBUG nova.virt.hardware [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1042.575764] env[62740]: DEBUG nova.virt.hardware [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1042.575994] env[62740]: DEBUG nova.virt.hardware [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1042.576183] env[62740]: DEBUG nova.virt.hardware [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1042.576364] env[62740]: DEBUG nova.virt.hardware [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1042.577297] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-160c40d2-c194-4b38-981f-896be66f8740 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1042.587453] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceb4a2ee-98da-4837-92c5-507dc4045148 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1042.710881] env[62740]: DEBUG nova.policy [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '048caea79813451d82924da94516d4ea', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b199b4b774e74500855ae3d51a5a4871', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1043.340932] env[62740]: DEBUG nova.network.neutron [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Successfully created port: 9f2f3f93-5cb6-497d-8c60-33a3c1ab6551 {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1044.183468] env[62740]: DEBUG nova.network.neutron [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Successfully updated port: 9f2f3f93-5cb6-497d-8c60-33a3c1ab6551 {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1044.183468] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Expecting reply to msg 517f8c035b1e4d85a830e67f88ad2412 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1044.202332] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 517f8c035b1e4d85a830e67f88ad2412
[ 1044.203086] env[62740]: DEBUG oslo_concurrency.lockutils [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Acquiring lock "refresh_cache-d6c3ca16-5c7c-41e6-9850-10221603ad2a" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1044.203218] env[62740]: DEBUG oslo_concurrency.lockutils [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Acquired lock "refresh_cache-d6c3ca16-5c7c-41e6-9850-10221603ad2a" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1044.203383] env[62740]: DEBUG nova.network.neutron [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 1044.204997] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Expecting reply to msg da0b186909294523961bcf15326be90b in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1044.218696] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg da0b186909294523961bcf15326be90b
[ 1044.263294] env[62740]: DEBUG nova.network.neutron [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1044.344846] env[62740]: DEBUG nova.compute.manager [req-ceecf8dd-e784-4f33-a002-4570637f2e78 req-622b1ffe-3607-4acd-8def-c1d648f52f98 service nova] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Received event network-vif-plugged-9f2f3f93-5cb6-497d-8c60-33a3c1ab6551 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}}
[ 1044.345198] env[62740]: DEBUG oslo_concurrency.lockutils [req-ceecf8dd-e784-4f33-a002-4570637f2e78 req-622b1ffe-3607-4acd-8def-c1d648f52f98 service nova] Acquiring lock "d6c3ca16-5c7c-41e6-9850-10221603ad2a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1044.345198] env[62740]: DEBUG oslo_concurrency.lockutils [req-ceecf8dd-e784-4f33-a002-4570637f2e78 req-622b1ffe-3607-4acd-8def-c1d648f52f98 service nova] Lock "d6c3ca16-5c7c-41e6-9850-10221603ad2a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1044.345395] env[62740]: DEBUG oslo_concurrency.lockutils [req-ceecf8dd-e784-4f33-a002-4570637f2e78 req-622b1ffe-3607-4acd-8def-c1d648f52f98 service nova] Lock "d6c3ca16-5c7c-41e6-9850-10221603ad2a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1044.345570] env[62740]: DEBUG nova.compute.manager [req-ceecf8dd-e784-4f33-a002-4570637f2e78 req-622b1ffe-3607-4acd-8def-c1d648f52f98 service nova] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] No waiting events found dispatching network-vif-plugged-9f2f3f93-5cb6-497d-8c60-33a3c1ab6551 {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1044.345650] env[62740]: WARNING nova.compute.manager [req-ceecf8dd-e784-4f33-a002-4570637f2e78 req-622b1ffe-3607-4acd-8def-c1d648f52f98 service nova] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Received unexpected event network-vif-plugged-9f2f3f93-5cb6-497d-8c60-33a3c1ab6551 for instance with vm_state building and task_state spawning.
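The event block above is Neutron's network-vif-plugged callback arriving while nothing is waiting on it: under the per-instance "<uuid>-events" lock, pop_instance_event looks up a registered waiter for the event name, finds none ("No waiting events found dispatching ..."), and the manager logs the WARNING instead of waking anyone. A compact sketch of that dispatch, with hypothetical bookkeeping (not the manager.py code):

    import threading

    class InstanceEvents:
        def __init__(self):
            self._lock = threading.Lock()   # plays the role of the "<uuid>-events" lock
            self._waiters = {}              # (instance_uuid, event_name) -> threading.Event

        def pop_instance_event(self, instance_uuid, event_name):
            with self._lock:
                return self._waiters.pop((instance_uuid, event_name), None)

        def dispatch(self, instance_uuid, event_name, log):
            waiter = self.pop_instance_event(instance_uuid, event_name)
            if waiter is None:
                log.warning("Received unexpected event %s for instance", event_name)
            else:
                waiter.set()    # wakes whoever registered for the event before plugging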
[ 1044.541134] env[62740]: DEBUG nova.network.neutron [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Updating instance_info_cache with network_info: [{"id": "9f2f3f93-5cb6-497d-8c60-33a3c1ab6551", "address": "fa:16:3e:c6:af:d2", "network": {"id": "b0e548ba-7547-4453-9fa6-b351136e17d9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-245802315-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b199b4b774e74500855ae3d51a5a4871", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50171613-b419-45e3-9ada-fcb6cd921428", "external-id": "nsx-vlan-transportzone-914", "segmentation_id": 914, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f2f3f93-5c", "ovs_interfaceid": "9f2f3f93-5cb6-497d-8c60-33a3c1ab6551", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1044.541248] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Expecting reply to msg 2416d267bcb24220adb1cb79b3235698 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1044.560708] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2416d267bcb24220adb1cb79b3235698 [ 1044.561476] env[62740]: DEBUG oslo_concurrency.lockutils [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Releasing lock "refresh_cache-d6c3ca16-5c7c-41e6-9850-10221603ad2a" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1044.563129] env[62740]: DEBUG nova.compute.manager [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Instance network_info: |[{"id": "9f2f3f93-5cb6-497d-8c60-33a3c1ab6551", "address": "fa:16:3e:c6:af:d2", "network": {"id": "b0e548ba-7547-4453-9fa6-b351136e17d9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-245802315-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b199b4b774e74500855ae3d51a5a4871", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50171613-b419-45e3-9ada-fcb6cd921428", "external-id": 
"nsx-vlan-transportzone-914", "segmentation_id": 914, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f2f3f93-5c", "ovs_interfaceid": "9f2f3f93-5cb6-497d-8c60-33a3c1ab6551", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1044.563853] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c6:af:d2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '50171613-b419-45e3-9ada-fcb6cd921428', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9f2f3f93-5cb6-497d-8c60-33a3c1ab6551', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1044.571889] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Creating folder: Project (b199b4b774e74500855ae3d51a5a4871). Parent ref: group-v156037. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1044.572534] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f4f4cdd9-9c31-4a47-9711-cbcb668a0356 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.588652] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Created folder: Project (b199b4b774e74500855ae3d51a5a4871) in parent group-v156037. [ 1044.588895] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Creating folder: Instances. Parent ref: group-v156120. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1044.589171] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-076b9f09-782a-486a-a00e-13306dba3a82 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.598167] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Created folder: Instances in parent group-v156120. [ 1044.598399] env[62740]: DEBUG oslo.service.loopingcall [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1044.598619] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1044.598828] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-63ed8ad9-3145-488d-a257-ceab8b9a8205 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.620825] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1044.620825] env[62740]: value = "task-640183" [ 1044.620825] env[62740]: _type = "Task" [ 1044.620825] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.629908] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640183, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.132852] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640183, 'name': CreateVM_Task, 'duration_secs': 0.414005} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.133060] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1045.133771] env[62740]: DEBUG oslo_concurrency.lockutils [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1045.133939] env[62740]: DEBUG oslo_concurrency.lockutils [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Acquired lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.134327] env[62740]: DEBUG oslo_concurrency.lockutils [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1045.134586] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1137ca18-dafb-43ab-a384-34aa022dc353 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.139676] env[62740]: DEBUG oslo_vmware.api [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Waiting for the task: (returnval){ [ 1045.139676] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]522af29d-4628-d3af-0960-970047bd449b" [ 1045.139676] env[62740]: _type = "Task" [ 1045.139676] env[62740]: } to complete. 
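CreateVM_Task, by contrast, is asynchronous: the records above show the task handle (task-640183), a 0% progress poll, and completion after about 0.41s. A minimal sketch of that round-trip, reusing the session from the earlier sketch; the folder, config spec, and resource pool references are placeholders:

    # task-640183 in the log corresponds to a handle like this one.
    task = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                              config=config_spec, pool=resource_pool_ref)
    # wait_for_task() polls the task (the "progress is 0%" lines) and either
    # returns the completed task info or raises the translated VIM fault.
    task_info = session.wait_for_task(task)
    vm_ref = task_info.result   # the newly created VM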
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.147270] env[62740]: DEBUG oslo_vmware.api [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]522af29d-4628-d3af-0960-970047bd449b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.649743] env[62740]: DEBUG oslo_concurrency.lockutils [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Releasing lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1045.650011] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1045.650223] env[62740]: DEBUG oslo_concurrency.lockutils [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1046.618179] env[62740]: DEBUG nova.compute.manager [req-64146da3-6471-4e42-a4e1-ef8490bf62c8 req-7f97c98c-87fc-4c24-8b0b-7fc189587421 service nova] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Received event network-changed-9f2f3f93-5cb6-497d-8c60-33a3c1ab6551 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1046.618390] env[62740]: DEBUG nova.compute.manager [req-64146da3-6471-4e42-a4e1-ef8490bf62c8 req-7f97c98c-87fc-4c24-8b0b-7fc189587421 service nova] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Refreshing instance network info cache due to event network-changed-9f2f3f93-5cb6-497d-8c60-33a3c1ab6551. 
{{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1046.618607] env[62740]: DEBUG oslo_concurrency.lockutils [req-64146da3-6471-4e42-a4e1-ef8490bf62c8 req-7f97c98c-87fc-4c24-8b0b-7fc189587421 service nova] Acquiring lock "refresh_cache-d6c3ca16-5c7c-41e6-9850-10221603ad2a" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1046.618750] env[62740]: DEBUG oslo_concurrency.lockutils [req-64146da3-6471-4e42-a4e1-ef8490bf62c8 req-7f97c98c-87fc-4c24-8b0b-7fc189587421 service nova] Acquired lock "refresh_cache-d6c3ca16-5c7c-41e6-9850-10221603ad2a" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.618912] env[62740]: DEBUG nova.network.neutron [req-64146da3-6471-4e42-a4e1-ef8490bf62c8 req-7f97c98c-87fc-4c24-8b0b-7fc189587421 service nova] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Refreshing network info cache for port 9f2f3f93-5cb6-497d-8c60-33a3c1ab6551 {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1046.619439] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-64146da3-6471-4e42-a4e1-ef8490bf62c8 req-7f97c98c-87fc-4c24-8b0b-7fc189587421 service nova] Expecting reply to msg 27ac0f04cdc745a1a08fdba4a44f80a3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1046.634951] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 27ac0f04cdc745a1a08fdba4a44f80a3 [ 1046.862291] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Acquiring lock "eba85edb-4d86-42c9-8b49-98f2173a3eeb" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.863181] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Lock "eba85edb-4d86-42c9-8b49-98f2173a3eeb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1047.038600] env[62740]: DEBUG nova.network.neutron [req-64146da3-6471-4e42-a4e1-ef8490bf62c8 req-7f97c98c-87fc-4c24-8b0b-7fc189587421 service nova] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Updated VIF entry in instance network info cache for port 9f2f3f93-5cb6-497d-8c60-33a3c1ab6551.
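The refresh_cache-<uuid> acquire/release pairs above come from oslo.concurrency's lock() context manager (the lockutils.py:310/313/331 call sites in the log), which serializes concurrent rebuilds of one instance's network info cache. A minimal sketch of the same pattern; the refresh body is a stand-in:

    from oslo_concurrency import lockutils

    def refresh_info_cache(instance_uuid):
        # lockutils.lock() emits the Acquiring/Acquired/Releasing DEBUG lines
        # seen above, keyed by the lock name.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            rebuild_cache_from_neutron(instance_uuid)

    def rebuild_cache_from_neutron(instance_uuid):
        pass  # stand-in for the Neutron round-trip recorded in the log

    refresh_info_cache('d6c3ca16-5c7c-41e6-9850-10221603ad2a')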
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1047.038978] env[62740]: DEBUG nova.network.neutron [req-64146da3-6471-4e42-a4e1-ef8490bf62c8 req-7f97c98c-87fc-4c24-8b0b-7fc189587421 service nova] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Updating instance_info_cache with network_info: [{"id": "9f2f3f93-5cb6-497d-8c60-33a3c1ab6551", "address": "fa:16:3e:c6:af:d2", "network": {"id": "b0e548ba-7547-4453-9fa6-b351136e17d9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-245802315-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b199b4b774e74500855ae3d51a5a4871", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50171613-b419-45e3-9ada-fcb6cd921428", "external-id": "nsx-vlan-transportzone-914", "segmentation_id": 914, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f2f3f93-5c", "ovs_interfaceid": "9f2f3f93-5cb6-497d-8c60-33a3c1ab6551", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1047.039542] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-64146da3-6471-4e42-a4e1-ef8490bf62c8 req-7f97c98c-87fc-4c24-8b0b-7fc189587421 service nova] Expecting reply to msg 4561abf5cd794188923ca481f8084599 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1047.051110] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4561abf5cd794188923ca481f8084599 [ 1047.051798] env[62740]: DEBUG oslo_concurrency.lockutils [req-64146da3-6471-4e42-a4e1-ef8490bf62c8 req-7f97c98c-87fc-4c24-8b0b-7fc189587421 service nova] Releasing lock "refresh_cache-d6c3ca16-5c7c-41e6-9850-10221603ad2a" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1054.289085] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-08830918-70ea-4b00-85f2-70d07e88f93c tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Expecting reply to msg 01856097822c409e884f30f2cf56462a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1054.298754] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 01856097822c409e884f30f2cf56462a [ 1054.299246] env[62740]: DEBUG oslo_concurrency.lockutils [None req-08830918-70ea-4b00-85f2-70d07e88f93c tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Acquiring lock "d6c3ca16-5c7c-41e6-9850-10221603ad2a" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1060.248719] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Acquiring lock "7aacf4e0-b508-4a18-909a-3d1fe9458d98" by
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1060.249088] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Lock "7aacf4e0-b508-4a18-909a-3d1fe9458d98" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1066.893150] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1066.893477] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager.update_available_resource {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1066.893764] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg f213728cbdf545698404d87146fcfa7e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1066.904127] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f213728cbdf545698404d87146fcfa7e [ 1066.905089] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1066.905302] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1066.905473] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1066.905629] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62740) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1066.906729] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5486e56-211c-42de-b415-6a90adeb5301 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.915701] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea37ac38-18e3-444d-8cb3-bd614516bae2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1066.929432] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fae5b538-4c0e-4377-84d3-f946eb5573f6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.935461] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f67b28c9-4fd5-4680-8412-e02b3e5b1ef8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.964971] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181663MB free_disk=90GB free_vcpus=48 pci_devices=None {{(pid=62740) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1066.965136] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1066.965327] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1066.966128] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 7a703df10f594c4f8acedb0017d65068 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1067.001583] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7a703df10f594c4f8acedb0017d65068 [ 1067.005784] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 7cd861bc9a064cfbb01bc7cf4e02c075 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1067.016415] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7cd861bc9a064cfbb01bc7cf4e02c075 [ 1067.037744] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance a24df1e4-2865-4ab3-beae-0892dca12bef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1067.037906] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 00085df9-ce61-4ccc-8ecf-16956109eb8f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1067.038048] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 697e3884-2ef4-423e-af81-e5d1e94f65a2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1067.038444] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance f22357ec-450c-4545-8822-74b83bfc5a35 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1067.038570] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 732da1c8-e83e-4dd7-96c2-dbfa9468baab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1067.038712] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 388a39df-9fa9-4153-9f3c-4ad94fd5edfb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1067.038824] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 5f57389d-853e-4439-872a-8345664578d0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1067.038979] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 472cd209-4192-4473-b788-d1ea342653bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1067.039058] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance d8dac9af-0897-4fbf-8ee6-1fb3955d48c0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1067.039185] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance d6c3ca16-5c7c-41e6-9850-10221603ad2a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1067.039772] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 5403c640185249b2b45c7017da44e2fd in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1067.050527] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5403c640185249b2b45c7017da44e2fd [ 1067.051369] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 158406db-7196-4826-aefa-20a58daa186b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1067.051874] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 01a2cee7b2a345cbad6bacc4717a880a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1067.061931] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 01a2cee7b2a345cbad6bacc4717a880a [ 1067.062661] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance cf00af51-2b31-4b99-a692-8b0851dd74b8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1067.063179] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 54a59c22dca342a39ba5e2b2f86ae607 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1067.072706] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 54a59c22dca342a39ba5e2b2f86ae607 [ 1067.073408] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 6ca702af-1a5c-40bb-b6c7-2f55ca308c02 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1067.073934] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 0be76d563e9a4b32bddab6af552ca2d0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1067.083879] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0be76d563e9a4b32bddab6af552ca2d0 [ 1067.084568] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance e21a5624-20ca-45d8-a0bf-dd87cec1c701 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1067.085067] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg bce4bd3ac67c420f8122697d4e592f8b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1067.094693] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bce4bd3ac67c420f8122697d4e592f8b [ 1067.095448] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance b0b16f66-8dbc-4e9b-a932-5de45215cfff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1067.095959] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 235311115de14851866a35710d6a51e0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1067.105709] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 235311115de14851866a35710d6a51e0 [ 1067.106407] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 56106517-e735-4bf5-8d5a-dc0d4aab3991 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1067.106894] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg af5c569c61fb4482b381cc179b196aa3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1067.116784] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af5c569c61fb4482b381cc179b196aa3 [ 1067.117680] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 43e4ddf4-230e-49f7-975f-ba99a6da9398 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1067.118254] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 2c8a7f6e114b4d05b7d306770a28628f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1067.127457] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2c8a7f6e114b4d05b7d306770a28628f [ 1067.128110] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance eba85edb-4d86-42c9-8b49-98f2173a3eeb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1067.128620] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 38bbfa070f224928b8555b29495ea00f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1067.137347] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 38bbfa070f224928b8555b29495ea00f [ 1067.137988] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 7aacf4e0-b508-4a18-909a-3d1fe9458d98 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1067.138257] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1067.138445] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1067.378879] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1388fa30-250b-4e1c-8ae8-00fc5730c2d1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.386395] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-917c1f9e-7f19-4f05-a70f-4c04b24ba917 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.416667] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22a98066-5069-48a2-920d-85bd5517058a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.423795] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84928aa3-5260-4623-bd21-fef0e97fb873 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.436660] env[62740]: DEBUG nova.compute.provider_tree [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1067.437127] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg cfc64ed3d151406193f134525a1094de in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1067.445701] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cfc64ed3d151406193f134525a1094de [ 1067.446610] env[62740]: DEBUG nova.scheduler.client.report [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed for provider 
d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1067.448883] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 7c3d661a2b3847459b17d64661643d00 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1067.461944] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c3d661a2b3847459b17d64661643d00 [ 1067.462653] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62740) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1067.462874] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.498s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1068.459792] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1068.460116] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1068.885665] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1068.890315] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1068.890496] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Starting heal instance info cache {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 1068.890627] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Rebuilding the list of instances to heal {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 1068.891243] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 23474f6cf3e84deb87d83febc7486f68 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1068.908152] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
23474f6cf3e84deb87d83febc7486f68 [ 1068.910479] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1068.910630] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1068.910794] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1068.910932] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1068.911070] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1068.911238] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1068.911390] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 5f57389d-853e-4439-872a-8345664578d0] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1068.911496] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1068.911617] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1068.911734] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1068.911917] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Didn't find any instances for network info cache update. 
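The audit figures a few records back are internally consistent and easy to check: ten actively-managed instances each hold {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1} in placement, and used_ram=1792MB is exactly the 512MB reserved in the MEMORY_MB inventory record plus 10 x 128MB of allocations:

    # Sanity check of the resource tracker's reported totals.
    allocations = [{'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}] * 10

    used_vcpus = sum(a['VCPU'] for a in allocations)
    used_ram_mb = 512 + sum(a['MEMORY_MB'] for a in allocations)  # reserved + allocated
    used_disk_gb = sum(a['DISK_GB'] for a in allocations)

    assert used_vcpus == 10      # "total allocated vcpus: 10"
    assert used_ram_mb == 1792   # "used_ram=1792MB"
    assert used_disk_gb == 10    # "used_disk=10GB"

(The nine instances listed as "scheduled ... but the instance has yet to start" are skipped by the heal pass and do not contribute to these totals.)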
{{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 1069.890401] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1070.890627] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1070.890917] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62740) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 1071.891455] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1072.241757] env[62740]: WARNING oslo_vmware.rw_handles [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1072.241757] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1072.241757] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1072.241757] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1072.241757] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1072.241757] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 1072.241757] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1072.241757] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1072.241757] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1072.241757] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1072.241757] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1072.241757] env[62740]: ERROR oslo_vmware.rw_handles [ 1072.242268] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/a580d5e3-c768-4e6d-8ac3-a01fe42675e2/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1072.244177] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: 
a24df1e4-2865-4ab3-beae-0892dca12bef] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1072.244433] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Copying Virtual Disk [datastore2] vmware_temp/a580d5e3-c768-4e6d-8ac3-a01fe42675e2/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore2] vmware_temp/a580d5e3-c768-4e6d-8ac3-a01fe42675e2/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1072.244719] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4648e5c3-4cfa-4402-8b08-09ed8673b54d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.253499] env[62740]: DEBUG oslo_vmware.api [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Waiting for the task: (returnval){ [ 1072.253499] env[62740]: value = "task-640184" [ 1072.253499] env[62740]: _type = "Task" [ 1072.253499] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.261623] env[62740]: DEBUG oslo_vmware.api [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Task: {'id': task-640184, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.764168] env[62740]: DEBUG oslo_vmware.exceptions [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Fault InvalidArgument not matched. 
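The traceback context above ends in oslo.vmware's fault translation: "Fault InvalidArgument not matched" means no specific exception class maps to the VIM fault, so the generic VimFaultException (with fault_list ['InvalidArgument']) is raised and the build is torn down. A hedged sketch of catching that case around a task wait; session is the VMwareAPISession from the earlier sketch, vmdk_copy_task stands for a handle like task-640184 above, and the recovery helper is hypothetical:

    from oslo_vmware import exceptions as vexc

    try:
        session.wait_for_task(vmdk_copy_task)   # e.g. the CopyVirtualDisk_Task above
    except vexc.VimFaultException as e:
        # fault_list carries the raw VIM fault names; str(e) carries vCenter's
        # message ("A specified parameter was not correct: fileType").
        if 'InvalidArgument' in (e.fault_list or []):
            cleanup_cached_image_fragments()    # hypothetical recovery step
        raise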
{{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 1072.764516] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1072.765264] env[62740]: ERROR nova.compute.manager [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1072.765264] env[62740]: Faults: ['InvalidArgument']
[ 1072.765264] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Traceback (most recent call last):
[ 1072.765264] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 1072.765264] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] yield resources
[ 1072.765264] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1072.765264] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] self.driver.spawn(context, instance, image_meta,
[ 1072.765264] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1072.765264] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1072.765264] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1072.765264] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] self._fetch_image_if_missing(context, vi)
[ 1072.765264] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1072.765699] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] image_cache(vi, tmp_image_ds_loc)
[ 1072.765699] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1072.765699] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] vm_util.copy_virtual_disk(
[ 1072.765699] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1072.765699] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] session._wait_for_task(vmdk_copy_task)
[ 1072.765699] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1072.765699] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] return self.wait_for_task(task_ref)
[ 1072.765699] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1072.765699] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] return evt.wait()
[ 1072.765699] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1072.765699] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] result = hub.switch()
[ 1072.765699] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1072.765699] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] return self.greenlet.switch()
[ 1072.766074] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1072.766074] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] self.f(*self.args, **self.kw)
[ 1072.766074] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1072.766074] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] raise exceptions.translate_fault(task_info.error)
[ 1072.766074] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1072.766074] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Faults: ['InvalidArgument']
[ 1072.766074] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef]
[ 1072.766074] env[62740]: INFO nova.compute.manager [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Terminating instance
[ 1072.767473] env[62740]: DEBUG oslo_concurrency.lockutils [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1072.767684] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1072.768314] env[62740]: DEBUG nova.compute.manager [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 1072.768557] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1072.768791] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8b113f94-8640-410e-ba1f-9fbdd3d3c545 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1072.771426] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea0d380f-d185-4be6-a5b9-1e2cb69508e8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1072.778189] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 1072.778462] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3ba37ddd-78b6-42c2-9333-64a00da0fe0a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1072.780622] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1072.780796] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 1072.781748] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-257f1ea9-bf58-4507-ae35-bf05efe9deea {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1072.786180] env[62740]: DEBUG oslo_vmware.api [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Waiting for the task: (returnval){
[ 1072.786180] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]527df617-80a3-6c8f-f1a2-112f6c9513ee"
[ 1072.786180] env[62740]: _type = "Task"
[ 1072.786180] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1072.793507] env[62740]: DEBUG oslo_vmware.api [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]527df617-80a3-6c8f-f1a2-112f6c9513ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1072.847644] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 1072.847860] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 1072.848047] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Deleting the datastore file [datastore2] a24df1e4-2865-4ab3-beae-0892dca12bef {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1072.848542] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7f1aa154-d2a1-49bc-81c1-d86a37b6cc04 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1072.855282] env[62740]: DEBUG oslo_vmware.api [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Waiting for the task: (returnval){
[ 1072.855282] env[62740]: value = "task-640186"
[ 1072.855282] env[62740]: _type = "Task"
[ 1072.855282] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1072.863235] env[62740]: DEBUG oslo_vmware.api [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Task: {'id': task-640186, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1073.297286] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 1073.297585] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Creating directory with path [datastore2] vmware_temp/2b4b9b4b-dfd3-4406-b05e-5371a5e833b7/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1073.297820] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e1a9ef0e-b18b-4c8e-8356-242de5c98572 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1073.309974] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Created directory with path [datastore2] vmware_temp/2b4b9b4b-dfd3-4406-b05e-5371a5e833b7/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1073.310204] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Fetch image to [datastore2] vmware_temp/2b4b9b4b-dfd3-4406-b05e-5371a5e833b7/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 1073.310376] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/2b4b9b4b-dfd3-4406-b05e-5371a5e833b7/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 1073.311329] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16b77ca1-54eb-4c4d-80f3-da3909123061 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1073.319867] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97e4774c-c15e-4fec-9a67-402d7993eb5f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1073.329270] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93ce6f3d-067d-4efa-9cb1-1d0949f4c782 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1073.362608] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e30d29c-b703-4ca1-8ab4-f0bbbb87562b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1073.370219] env[62740]: DEBUG oslo_vmware.api [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Task: {'id': task-640186, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072916} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1073.371797] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1073.371993] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 1073.372185] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1073.372358] env[62740]: INFO nova.compute.manager [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Took 0.60 seconds to destroy the instance on the hypervisor.
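
Note: the "Instance failed to spawn" traceback above bottoms out in oslo_vmware's task poller, where _poll_task raises exceptions.translate_fault(task_info.error) and the waiter sees it as a VimFaultException. A minimal sketch of that poll-and-translate pattern follows; it is illustrative only, not the real oslo.vmware implementation, and poll_task_info plus the dict shapes are hypothetical stand-ins:

    # Sketch of the poll/translate pattern from the traceback above.
    # Assumption: `poll_task_info` is a caller-supplied callable returning
    # dicts like {'state': 'running'|'success'|'error', 'error': {...}};
    # the real library instead polls a vCenter Task managed object.
    import time


    class VimFaultException(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""

        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list


    def translate_fault(error):
        # The real translate_fault() maps VIM fault names onto a hierarchy
        # of exception classes; this stand-in keeps only the shape.
        return VimFaultException(error.get("faults", []), error.get("message", ""))


    def wait_for_task(poll_task_info, interval=0.5):
        """Poll task info until the task reaches a terminal state."""
        while True:
            info = poll_task_info()
            if info["state"] == "success":
                return info
            if info["state"] == "error":
                # Mirrors `raise exceptions.translate_fault(task_info.error)`
                # in _poll_task from the traceback above.
                raise translate_fault(info["error"])
            time.sleep(interval)

The eventlet frames in the traceback (evt.wait(), hub.switch()) appear because the real waiter blocks on an eventlet Event while the poll runs on a looping call; the translated fault is then re-raised in the waiting greenthread.
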
[ 1073.374211] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-64b350e5-4aef-49b1-8183-b9838ca6693e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1073.376340] env[62740]: DEBUG nova.compute.claims [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 1073.376587] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1073.376896] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1073.379230] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Expecting reply to msg 60d46c6edb4d44e3b6840b531cbe1f94 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1073.400645] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 1073.423168] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 60d46c6edb4d44e3b6840b531cbe1f94
[ 1073.457983] env[62740]: DEBUG oslo_vmware.rw_handles [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2b4b9b4b-dfd3-4406-b05e-5371a5e833b7/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 1073.523909] env[62740]: DEBUG oslo_vmware.rw_handles [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Completed reading data from the image iterator. {{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 1073.524106] env[62740]: DEBUG oslo_vmware.rw_handles [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2b4b9b4b-dfd3-4406-b05e-5371a5e833b7/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 1073.702985] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4261638-fe3b-4f88-972b-0e39001f7bd0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1073.710910] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43b22f65-d573-4e31-8f3c-f6dbb595a392 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1073.741804] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce21cc0-4119-44c0-a7ee-7e7a3d075668 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1073.748708] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7c60a59-76bf-4e6c-90c8-f2d9862f8125 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1073.761225] env[62740]: DEBUG nova.compute.provider_tree [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1073.761704] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Expecting reply to msg 8afb7e51b7c54b27a7723683a63bc340 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1073.769680] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8afb7e51b7c54b27a7723683a63bc340
[ 1073.770549] env[62740]: DEBUG nova.scheduler.client.report [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1073.772764] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Expecting reply to msg 14be40875ac74f7a9f562e93e2796a43 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1073.783591] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 14be40875ac74f7a9f562e93e2796a43
[ 1073.784244] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.407s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1073.784757] env[62740]: ERROR nova.compute.manager [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1073.784757] env[62740]: Faults: ['InvalidArgument']
[ 1073.784757] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Traceback (most recent call last):
[ 1073.784757] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1073.784757] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] self.driver.spawn(context, instance, image_meta,
[ 1073.784757] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1073.784757] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1073.784757] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1073.784757] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] self._fetch_image_if_missing(context, vi)
[ 1073.784757] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1073.784757] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] image_cache(vi, tmp_image_ds_loc)
[ 1073.784757] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1073.785154] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] vm_util.copy_virtual_disk(
[ 1073.785154] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1073.785154] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] session._wait_for_task(vmdk_copy_task)
[ 1073.785154] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1073.785154] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] return self.wait_for_task(task_ref)
[ 1073.785154] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1073.785154] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] return evt.wait()
[ 1073.785154] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1073.785154] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] result = hub.switch()
[ 1073.785154] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1073.785154] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] return self.greenlet.switch()
[ 1073.785154] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1073.785154] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] self.f(*self.args, **self.kw)
[ 1073.785690] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1073.785690] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] raise exceptions.translate_fault(task_info.error)
[ 1073.785690] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1073.785690] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Faults: ['InvalidArgument']
[ 1073.785690] env[62740]: ERROR nova.compute.manager [instance: a24df1e4-2865-4ab3-beae-0892dca12bef]
[ 1073.785690] env[62740]: DEBUG nova.compute.utils [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1073.786767] env[62740]: DEBUG nova.compute.manager [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Build of instance a24df1e4-2865-4ab3-beae-0892dca12bef was re-scheduled: A specified parameter was not correct: fileType
[ 1073.786767] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 1073.787153] env[62740]: DEBUG nova.compute.manager [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 1073.787328] env[62740]: DEBUG nova.compute.manager [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 1073.787499] env[62740]: DEBUG nova.compute.manager [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 1073.787665] env[62740]: DEBUG nova.network.neutron [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1074.130498] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Expecting reply to msg 38cdf2d0071749a5a3c337e816944e7e in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1074.141051] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 38cdf2d0071749a5a3c337e816944e7e
[ 1074.141712] env[62740]: DEBUG nova.network.neutron [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1074.142207] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Expecting reply to msg 1043393f741a4a81a43c7aed79d43824 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1074.153115] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1043393f741a4a81a43c7aed79d43824
[ 1074.154031] env[62740]: INFO nova.compute.manager [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Took 0.37 seconds to deallocate network for instance.
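
Note: the recurring 'Acquiring lock ... / Lock ... acquired ... waited 0.000s / Lock ... "released" ... held 0.407s' DEBUG triplets in this section are emitted by oslo.concurrency's lock helpers (the inner wrapper at lockutils.py:402/407/421 cited in each line). A minimal sketch of the two usual usage forms; the function bodies below are illustrative placeholders, not Nova's code:

    # Both forms serialize on a process-local semaphore keyed by name
    # ("compute_resources" in the log); the decorator form is what yields
    # the `by "<function>"` attribution and the waited/held timings.
    from oslo_concurrency import lockutils


    @lockutils.synchronized('compute_resources')
    def abort_instance_claim():
        # Runs with the 'compute_resources' lock held, serialized against
        # any other function synchronized on the same name.
        pass


    def update_usage():
        # Equivalent context-manager form.
        with lockutils.lock('compute_resources'):
            pass

This is why the abort path above and the instance_claim acquired later at [ 1074.510074] contend on the same "compute_resources" name: one lock serializes the resource tracker's claim accounting on the host.
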
[ 1074.156163] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Expecting reply to msg 4d8fa38513784f3da3ef94de40fa8162 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1074.191074] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4d8fa38513784f3da3ef94de40fa8162 [ 1074.193971] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Expecting reply to msg 76094b8bf8754f9686e556be3d273d50 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1074.235916] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 76094b8bf8754f9686e556be3d273d50 [ 1074.259736] env[62740]: INFO nova.scheduler.client.report [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Deleted allocations for instance a24df1e4-2865-4ab3-beae-0892dca12bef [ 1074.266860] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Expecting reply to msg 3ae8905593804db688793ec96591f9d8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1074.288388] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ae8905593804db688793ec96591f9d8 [ 1074.289520] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9245783e-92b9-45b4-8951-aaff6842bae0 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Lock "a24df1e4-2865-4ab3-beae-0892dca12bef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 465.118s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1074.290430] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-bc639afe-9daf-45b2-bcef-b70a1ea9afae tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Expecting reply to msg 2d526ba557094146beb0f3bf89f4f816 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1074.291597] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e86a9ce2-093d-4a46-96fc-773b73db05a2 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Lock "a24df1e4-2865-4ab3-beae-0892dca12bef" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 264.347s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1074.291958] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e86a9ce2-093d-4a46-96fc-773b73db05a2 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Acquiring lock "a24df1e4-2865-4ab3-beae-0892dca12bef-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1074.292431] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e86a9ce2-093d-4a46-96fc-773b73db05a2 
tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Lock "a24df1e4-2865-4ab3-beae-0892dca12bef-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1074.292649] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e86a9ce2-093d-4a46-96fc-773b73db05a2 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Lock "a24df1e4-2865-4ab3-beae-0892dca12bef-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1074.295049] env[62740]: INFO nova.compute.manager [None req-e86a9ce2-093d-4a46-96fc-773b73db05a2 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Terminating instance [ 1074.297274] env[62740]: DEBUG nova.compute.manager [None req-e86a9ce2-093d-4a46-96fc-773b73db05a2 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1074.297755] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-e86a9ce2-093d-4a46-96fc-773b73db05a2 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1074.298020] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-efa5e876-40b7-4d19-8ef1-f634ca032dbd {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.308949] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2f6a168-3558-48fc-98be-4ee0f1f481e6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.320678] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2d526ba557094146beb0f3bf89f4f816 [ 1074.321294] env[62740]: DEBUG nova.compute.manager [None req-bc639afe-9daf-45b2-bcef-b70a1ea9afae tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: d60c8e65-1eb3-4017-b28e-8b72b0b4b2e1] Starting instance... 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1074.323053] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-bc639afe-9daf-45b2-bcef-b70a1ea9afae tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Expecting reply to msg 2efe466391e9403ab385fc06b3e479a8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1074.342690] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-e86a9ce2-093d-4a46-96fc-773b73db05a2 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a24df1e4-2865-4ab3-beae-0892dca12bef could not be found. [ 1074.342856] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-e86a9ce2-093d-4a46-96fc-773b73db05a2 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1074.343041] env[62740]: INFO nova.compute.manager [None req-e86a9ce2-093d-4a46-96fc-773b73db05a2 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1074.343295] env[62740]: DEBUG oslo.service.loopingcall [None req-e86a9ce2-093d-4a46-96fc-773b73db05a2 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1074.343510] env[62740]: DEBUG nova.compute.manager [-] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1074.343607] env[62740]: DEBUG nova.network.neutron [-] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1074.347284] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2efe466391e9403ab385fc06b3e479a8 [ 1074.347783] env[62740]: DEBUG nova.compute.manager [None req-bc639afe-9daf-45b2-bcef-b70a1ea9afae tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: d60c8e65-1eb3-4017-b28e-8b72b0b4b2e1] Instance disappeared before build. 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1074.348123] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-bc639afe-9daf-45b2-bcef-b70a1ea9afae tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Expecting reply to msg ee445030b7a54ae0aabc28b9d1bce1fa in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1074.357291] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ee445030b7a54ae0aabc28b9d1bce1fa [ 1074.363410] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 435b75c780ab4bd0a1312f116c295ace in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1074.370201] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 435b75c780ab4bd0a1312f116c295ace [ 1074.370542] env[62740]: DEBUG nova.network.neutron [-] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1074.371273] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 798bae2455e7446a99ad188f211bd7c3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1074.373506] env[62740]: DEBUG oslo_concurrency.lockutils [None req-bc639afe-9daf-45b2-bcef-b70a1ea9afae tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Lock "d60c8e65-1eb3-4017-b28e-8b72b0b4b2e1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 212.696s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1074.374038] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1aa24f57-b11a-40f0-91cd-4be53c042c2b tempest-FloatingIPsAssociationNegativeTestJSON-1331363627 tempest-FloatingIPsAssociationNegativeTestJSON-1331363627-project-member] Expecting reply to msg 7556e9eff31b4bc3a933ef7d6533bfe3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1074.380860] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 798bae2455e7446a99ad188f211bd7c3 [ 1074.381378] env[62740]: INFO nova.compute.manager [-] [instance: a24df1e4-2865-4ab3-beae-0892dca12bef] Took 0.04 seconds to deallocate network for instance. [ 1074.386876] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e86a9ce2-093d-4a46-96fc-773b73db05a2 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Expecting reply to msg 98511a19257c44c5acc14d2c2700d6eb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1074.389521] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7556e9eff31b4bc3a933ef7d6533bfe3 [ 1074.389969] env[62740]: DEBUG nova.compute.manager [None req-1aa24f57-b11a-40f0-91cd-4be53c042c2b tempest-FloatingIPsAssociationNegativeTestJSON-1331363627 tempest-FloatingIPsAssociationNegativeTestJSON-1331363627-project-member] [instance: 6531eee8-d8ec-4a9d-911c-d7d9b88baf19] Starting instance... 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1074.391774] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1aa24f57-b11a-40f0-91cd-4be53c042c2b tempest-FloatingIPsAssociationNegativeTestJSON-1331363627 tempest-FloatingIPsAssociationNegativeTestJSON-1331363627-project-member] Expecting reply to msg 4f98281ab10a4245b77132a6286eb3ca in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1074.413827] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4f98281ab10a4245b77132a6286eb3ca [ 1074.414400] env[62740]: DEBUG nova.compute.manager [None req-1aa24f57-b11a-40f0-91cd-4be53c042c2b tempest-FloatingIPsAssociationNegativeTestJSON-1331363627 tempest-FloatingIPsAssociationNegativeTestJSON-1331363627-project-member] [instance: 6531eee8-d8ec-4a9d-911c-d7d9b88baf19] Instance disappeared before build. {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1074.414741] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1aa24f57-b11a-40f0-91cd-4be53c042c2b tempest-FloatingIPsAssociationNegativeTestJSON-1331363627 tempest-FloatingIPsAssociationNegativeTestJSON-1331363627-project-member] Expecting reply to msg 3a1378bf36e944f2a4a79fad3a8e6634 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1074.419731] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 98511a19257c44c5acc14d2c2700d6eb [ 1074.426914] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a1378bf36e944f2a4a79fad3a8e6634 [ 1074.435219] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e86a9ce2-093d-4a46-96fc-773b73db05a2 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Expecting reply to msg 1824ce0f84d442ab865e4b0f6ae79ed6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1074.440100] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1aa24f57-b11a-40f0-91cd-4be53c042c2b tempest-FloatingIPsAssociationNegativeTestJSON-1331363627 tempest-FloatingIPsAssociationNegativeTestJSON-1331363627-project-member] Lock "6531eee8-d8ec-4a9d-911c-d7d9b88baf19" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 207.105s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1074.440699] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Expecting reply to msg 26dbea58c9b940b79cc74533d44646f4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1074.449095] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 26dbea58c9b940b79cc74533d44646f4 [ 1074.449596] env[62740]: DEBUG nova.compute.manager [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Starting instance... 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1074.451387] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Expecting reply to msg af2834f2c41d473eb7119e50a6b2f0e1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1074.490329] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af2834f2c41d473eb7119e50a6b2f0e1 [ 1074.496618] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1824ce0f84d442ab865e4b0f6ae79ed6 [ 1074.499704] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e86a9ce2-093d-4a46-96fc-773b73db05a2 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Lock "a24df1e4-2865-4ab3-beae-0892dca12bef" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.208s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1074.500844] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e86a9ce2-093d-4a46-96fc-773b73db05a2 tempest-ServersV294TestFqdnHostnames-1584391458 tempest-ServersV294TestFqdnHostnames-1584391458-project-member] Expecting reply to msg 31839c6eb8c64f87b802448c07340232 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1074.510074] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1074.510419] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1074.511892] env[62740]: INFO nova.compute.claims [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1074.513758] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Expecting reply to msg 12436dec353f4dddb660d5fd8eb9e672 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1074.515590] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31839c6eb8c64f87b802448c07340232 [ 1074.545283] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 12436dec353f4dddb660d5fd8eb9e672 [ 1074.547043] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Expecting reply to msg 05f8640e46a44ec28c4af8798d4f6779 in queue 
reply_30cb6e3d754a4ebf9cedab7950709402 [ 1074.554333] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 05f8640e46a44ec28c4af8798d4f6779 [ 1074.855480] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5017b47a-e426-4dd9-8d08-d43614a902a4 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.863139] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-340e28eb-b98d-4976-abf3-85e96474a4d1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.892924] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-902a199a-629b-4b19-9ee1-34df2a8b2162 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.899423] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02a09ed1-78a6-4d46-a7eb-1c2b44256c51 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.911734] env[62740]: DEBUG nova.compute.provider_tree [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1074.912217] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Expecting reply to msg 691d8dd7a125403495a660d42f2673b7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1074.921730] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 691d8dd7a125403495a660d42f2673b7 [ 1074.922601] env[62740]: DEBUG nova.scheduler.client.report [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1074.924805] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Expecting reply to msg db32e0ec85ce4066be0cf51775e6800e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1074.935657] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg db32e0ec85ce4066be0cf51775e6800e [ 1074.936319] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.426s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1074.936760] env[62740]: DEBUG nova.compute.manager [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Start building networks asynchronously for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1074.938485] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Expecting reply to msg 00a071f44bcb427aa11fd54d104dce79 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1074.966793] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 00a071f44bcb427aa11fd54d104dce79 [ 1074.968497] env[62740]: DEBUG nova.compute.utils [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1074.969106] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Expecting reply to msg ca21d5080eb04117a118ce46f3382153 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1074.970191] env[62740]: DEBUG nova.compute.manager [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1074.970278] env[62740]: DEBUG nova.network.neutron [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1074.980768] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ca21d5080eb04117a118ce46f3382153 [ 1074.981318] env[62740]: DEBUG nova.compute.manager [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1074.983031] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Expecting reply to msg 0b4f51b4634c49919f85b88d0b843467 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1075.013390] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0b4f51b4634c49919f85b88d0b843467 [ 1075.016223] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Expecting reply to msg 48cea357809e403c9e6ee38e04f4f0a3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1075.045041] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 48cea357809e403c9e6ee38e04f4f0a3 [ 1075.046212] env[62740]: DEBUG nova.compute.manager [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Start spawning the instance on the hypervisor. {{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1075.055505] env[62740]: DEBUG nova.policy [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '429cf598d00c43d59a6ee3d9baa7ebce', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '197a24d529294fddbbfd59ab958cc9ae', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 1075.073822] env[62740]: DEBUG nova.virt.hardware [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1075.074088] env[62740]: DEBUG nova.virt.hardware [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1075.074256] env[62740]: 
DEBUG nova.virt.hardware [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1075.074443] env[62740]: DEBUG nova.virt.hardware [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1075.074594] env[62740]: DEBUG nova.virt.hardware [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1075.074747] env[62740]: DEBUG nova.virt.hardware [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1075.074983] env[62740]: DEBUG nova.virt.hardware [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1075.075195] env[62740]: DEBUG nova.virt.hardware [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1075.075371] env[62740]: DEBUG nova.virt.hardware [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1075.075540] env[62740]: DEBUG nova.virt.hardware [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1075.075713] env[62740]: DEBUG nova.virt.hardware [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1075.076617] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2751006f-7f5b-464e-815c-82ddac5d7e72 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.085045] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3558132-fa2b-4964-9ea4-9029828dcb7c {{(pid=62740) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.537651] env[62740]: DEBUG nova.network.neutron [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Successfully created port: 6e8aed24-74bd-4edb-8834-184c43ccc72f {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1076.421477] env[62740]: DEBUG nova.compute.manager [req-d20d8af5-54c0-4e10-8b0b-064929c75d0c req-f2a912b8-0eb7-414a-9749-7b140a1cee4a service nova] [instance: 158406db-7196-4826-aefa-20a58daa186b] Received event network-vif-plugged-6e8aed24-74bd-4edb-8834-184c43ccc72f {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1076.421709] env[62740]: DEBUG oslo_concurrency.lockutils [req-d20d8af5-54c0-4e10-8b0b-064929c75d0c req-f2a912b8-0eb7-414a-9749-7b140a1cee4a service nova] Acquiring lock "158406db-7196-4826-aefa-20a58daa186b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1076.421918] env[62740]: DEBUG oslo_concurrency.lockutils [req-d20d8af5-54c0-4e10-8b0b-064929c75d0c req-f2a912b8-0eb7-414a-9749-7b140a1cee4a service nova] Lock "158406db-7196-4826-aefa-20a58daa186b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1076.422102] env[62740]: DEBUG oslo_concurrency.lockutils [req-d20d8af5-54c0-4e10-8b0b-064929c75d0c req-f2a912b8-0eb7-414a-9749-7b140a1cee4a service nova] Lock "158406db-7196-4826-aefa-20a58daa186b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1076.422275] env[62740]: DEBUG nova.compute.manager [req-d20d8af5-54c0-4e10-8b0b-064929c75d0c req-f2a912b8-0eb7-414a-9749-7b140a1cee4a service nova] [instance: 158406db-7196-4826-aefa-20a58daa186b] No waiting events found dispatching network-vif-plugged-6e8aed24-74bd-4edb-8834-184c43ccc72f {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1076.422441] env[62740]: WARNING nova.compute.manager [req-d20d8af5-54c0-4e10-8b0b-064929c75d0c req-f2a912b8-0eb7-414a-9749-7b140a1cee4a service nova] [instance: 158406db-7196-4826-aefa-20a58daa186b] Received unexpected event network-vif-plugged-6e8aed24-74bd-4edb-8834-184c43ccc72f for instance with vm_state building and task_state spawning. 
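The Acquiring lock / acquired by / "released" by triplets around the network-vif-plugged event above are emitted by oslo.concurrency's synchronized wrapper (the "inner" function at lockutils.py:402/407/421 in the log). A minimal sketch of that pattern follows, with illustrative names rather than Nova's actual InstanceEvents code; the point is that the per-instance lock name has to be built at call time, so the decorated function is nested:

```python
# Sketch only: a nested function decorated with lockutils.synchronized()
# so the lock name can embed the instance UUID, as in the
# "<uuid>-events" lock lines above. Names are illustrative.
from oslo_concurrency import lockutils

def pop_instance_event(instance_uuid, event_name, waiters):
    @lockutils.synchronized('%s-events' % instance_uuid)
    def _pop_event():
        # Remove and return the waiter registered for this event, if any.
        # When nothing is waiting yet (instance still building/spawning),
        # the caller logs the "Received unexpected event" WARNING above.
        return waiters.pop(event_name, None)
    return _pop_event()
```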
[ 1076.469219] env[62740]: DEBUG nova.network.neutron [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Successfully updated port: 6e8aed24-74bd-4edb-8834-184c43ccc72f {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1076.469783] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Expecting reply to msg babb0fd3267c454388231d9c0718b574 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1076.481361] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg babb0fd3267c454388231d9c0718b574 [ 1076.482075] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Acquiring lock "refresh_cache-158406db-7196-4826-aefa-20a58daa186b" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1076.482222] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Acquired lock "refresh_cache-158406db-7196-4826-aefa-20a58daa186b" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1076.482372] env[62740]: DEBUG nova.network.neutron [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1076.482756] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Expecting reply to msg 16f8e395b77f48e9a289631d1e6902d0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1076.492513] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 16f8e395b77f48e9a289631d1e6902d0 [ 1076.540523] env[62740]: DEBUG nova.network.neutron [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1076.779405] env[62740]: DEBUG nova.network.neutron [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Updating instance_info_cache with network_info: [{"id": "6e8aed24-74bd-4edb-8834-184c43ccc72f", "address": "fa:16:3e:ee:ac:a2", "network": {"id": "385f4044-1182-4d64-9b08-fda463d212e0", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-815642864-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "197a24d529294fddbbfd59ab958cc9ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dad4f433-bb0b-45c7-8040-972ef2277f75", "external-id": "nsx-vlan-transportzone-451", "segmentation_id": 451, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e8aed24-74", "ovs_interfaceid": "6e8aed24-74bd-4edb-8834-184c43ccc72f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1076.779955] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Expecting reply to msg 6c0f7848c82d45e491bfe56ca42e6285 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1076.796137] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c0f7848c82d45e491bfe56ca42e6285 [ 1076.797029] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Releasing lock "refresh_cache-158406db-7196-4826-aefa-20a58daa186b" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1076.797029] env[62740]: DEBUG nova.compute.manager [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Instance network_info: |[{"id": "6e8aed24-74bd-4edb-8834-184c43ccc72f", "address": "fa:16:3e:ee:ac:a2", "network": {"id": "385f4044-1182-4d64-9b08-fda463d212e0", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-815642864-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "197a24d529294fddbbfd59ab958cc9ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "dad4f433-bb0b-45c7-8040-972ef2277f75", "external-id": "nsx-vlan-transportzone-451", "segmentation_id": 451, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e8aed24-74", "ovs_interfaceid": "6e8aed24-74bd-4edb-8834-184c43ccc72f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1076.797236] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ee:ac:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dad4f433-bb0b-45c7-8040-972ef2277f75', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6e8aed24-74bd-4edb-8834-184c43ccc72f', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1076.804775] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Creating folder: Project (197a24d529294fddbbfd59ab958cc9ae). Parent ref: group-v156037. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1076.805349] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4b4cfdc3-a398-4f6c-aa1c-bb535ae7f36b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.818576] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Created folder: Project (197a24d529294fddbbfd59ab958cc9ae) in parent group-v156037. [ 1076.818762] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Creating folder: Instances. Parent ref: group-v156123. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1076.818988] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7c342173-1332-453b-b270-d008aba54192 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.827711] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Created folder: Instances in parent group-v156123. [ 1076.827935] env[62740]: DEBUG oslo.service.loopingcall [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1076.828138] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 158406db-7196-4826-aefa-20a58daa186b] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1076.828358] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-47920882-68a4-49c5-be46-d455fc98b7b8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.847988] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1076.847988] env[62740]: value = "task-640189" [ 1076.847988] env[62740]: _type = "Task" [ 1076.847988] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.855768] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640189, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.357401] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640189, 'name': CreateVM_Task, 'duration_secs': 0.270562} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.357551] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 158406db-7196-4826-aefa-20a58daa186b] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1077.358229] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1077.358431] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Acquired lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1077.358756] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1077.359014] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-480d7a62-43d1-442f-b4da-d63f99b2dfa6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.363069] env[62740]: DEBUG oslo_vmware.api [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Waiting for the task: (returnval){ [ 1077.363069] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52b28965-b76f-32e9-b915-1c17841c34cd" [ 1077.363069] env[62740]: _type = "Task" [ 1077.363069] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.370089] env[62740]: DEBUG oslo_vmware.api [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52b28965-b76f-32e9-b915-1c17841c34cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.873490] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Releasing lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1077.873840] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1077.873946] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1078.490610] env[62740]: DEBUG nova.compute.manager [req-7b04cb3c-e392-4d85-92d0-621959df8e07 req-6701e5ed-6c3b-47da-8e7d-5f18b3da6db5 service nova] [instance: 158406db-7196-4826-aefa-20a58daa186b] Received event network-changed-6e8aed24-74bd-4edb-8834-184c43ccc72f {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1078.490823] env[62740]: DEBUG nova.compute.manager [req-7b04cb3c-e392-4d85-92d0-621959df8e07 req-6701e5ed-6c3b-47da-8e7d-5f18b3da6db5 service nova] [instance: 158406db-7196-4826-aefa-20a58daa186b] Refreshing instance network info cache due to event network-changed-6e8aed24-74bd-4edb-8834-184c43ccc72f. 
{{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1078.491045] env[62740]: DEBUG oslo_concurrency.lockutils [req-7b04cb3c-e392-4d85-92d0-621959df8e07 req-6701e5ed-6c3b-47da-8e7d-5f18b3da6db5 service nova] Acquiring lock "refresh_cache-158406db-7196-4826-aefa-20a58daa186b" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1078.491193] env[62740]: DEBUG oslo_concurrency.lockutils [req-7b04cb3c-e392-4d85-92d0-621959df8e07 req-6701e5ed-6c3b-47da-8e7d-5f18b3da6db5 service nova] Acquired lock "refresh_cache-158406db-7196-4826-aefa-20a58daa186b" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1078.491359] env[62740]: DEBUG nova.network.neutron [req-7b04cb3c-e392-4d85-92d0-621959df8e07 req-6701e5ed-6c3b-47da-8e7d-5f18b3da6db5 service nova] [instance: 158406db-7196-4826-aefa-20a58daa186b] Refreshing network info cache for port 6e8aed24-74bd-4edb-8834-184c43ccc72f {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1078.491876] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-7b04cb3c-e392-4d85-92d0-621959df8e07 req-6701e5ed-6c3b-47da-8e7d-5f18b3da6db5 service nova] Expecting reply to msg 618fc5a2fd21470fbf2538ff1e218bfb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1078.499457] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 618fc5a2fd21470fbf2538ff1e218bfb [ 1078.869150] env[62740]: DEBUG nova.network.neutron [req-7b04cb3c-e392-4d85-92d0-621959df8e07 req-6701e5ed-6c3b-47da-8e7d-5f18b3da6db5 service nova] [instance: 158406db-7196-4826-aefa-20a58daa186b] Updated VIF entry in instance network info cache for port 6e8aed24-74bd-4edb-8834-184c43ccc72f. 
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1078.869534] env[62740]: DEBUG nova.network.neutron [req-7b04cb3c-e392-4d85-92d0-621959df8e07 req-6701e5ed-6c3b-47da-8e7d-5f18b3da6db5 service nova] [instance: 158406db-7196-4826-aefa-20a58daa186b] Updating instance_info_cache with network_info: [{"id": "6e8aed24-74bd-4edb-8834-184c43ccc72f", "address": "fa:16:3e:ee:ac:a2", "network": {"id": "385f4044-1182-4d64-9b08-fda463d212e0", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-815642864-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "197a24d529294fddbbfd59ab958cc9ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dad4f433-bb0b-45c7-8040-972ef2277f75", "external-id": "nsx-vlan-transportzone-451", "segmentation_id": 451, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e8aed24-74", "ovs_interfaceid": "6e8aed24-74bd-4edb-8834-184c43ccc72f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1078.870060] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-7b04cb3c-e392-4d85-92d0-621959df8e07 req-6701e5ed-6c3b-47da-8e7d-5f18b3da6db5 service nova] Expecting reply to msg d15ce85a367d44c7b769166e759df31c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1078.878395] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d15ce85a367d44c7b769166e759df31c [ 1078.879546] env[62740]: DEBUG oslo_concurrency.lockutils [req-7b04cb3c-e392-4d85-92d0-621959df8e07 req-6701e5ed-6c3b-47da-8e7d-5f18b3da6db5 service nova] Releasing lock "refresh_cache-158406db-7196-4826-aefa-20a58daa186b" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1089.448637] env[62740]: WARNING oslo_vmware.rw_handles [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1089.448637] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1089.448637] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1089.448637] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1089.448637] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1089.448637] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 1089.448637] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1089.448637] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1089.448637] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in 
_read_status [ 1089.448637] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1089.448637] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1089.448637] env[62740]: ERROR oslo_vmware.rw_handles [ 1089.449584] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/4e483eab-8e81-4e14-8547-652f9c626cbb/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore1 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1089.451207] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1089.451467] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Copying Virtual Disk [datastore1] vmware_temp/4e483eab-8e81-4e14-8547-652f9c626cbb/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore1] vmware_temp/4e483eab-8e81-4e14-8547-652f9c626cbb/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1089.451805] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3a4441f1-fc30-45fd-be0c-809e9d64a69b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.461281] env[62740]: DEBUG oslo_vmware.api [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Waiting for the task: (returnval){ [ 1089.461281] env[62740]: value = "task-640190" [ 1089.461281] env[62740]: _type = "Task" [ 1089.461281] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.469331] env[62740]: DEBUG oslo_vmware.api [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Task: {'id': task-640190, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.970644] env[62740]: DEBUG oslo_vmware.exceptions [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Fault InvalidArgument not matched. 
{{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1089.970945] env[62740]: DEBUG oslo_concurrency.lockutils [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Releasing lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1089.971533] env[62740]: ERROR nova.compute.manager [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1089.971533] env[62740]: Faults: ['InvalidArgument'] [ 1089.971533] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Traceback (most recent call last): [ 1089.971533] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1089.971533] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] yield resources [ 1089.971533] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1089.971533] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] self.driver.spawn(context, instance, image_meta, [ 1089.971533] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1089.971533] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1089.971533] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1089.971533] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] self._fetch_image_if_missing(context, vi) [ 1089.971533] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1089.971901] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] image_cache(vi, tmp_image_ds_loc) [ 1089.971901] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1089.971901] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] vm_util.copy_virtual_disk( [ 1089.971901] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1089.971901] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] session._wait_for_task(vmdk_copy_task) [ 1089.971901] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1089.971901] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] return self.wait_for_task(task_ref) [ 1089.971901] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1089.971901] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] return evt.wait() [ 1089.971901] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1089.971901] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] result = hub.switch() [ 1089.971901] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1089.971901] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] return self.greenlet.switch() [ 1089.972267] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1089.972267] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] self.f(*self.args, **self.kw) [ 1089.972267] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1089.972267] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] raise exceptions.translate_fault(task_info.error) [ 1089.972267] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1089.972267] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Faults: ['InvalidArgument'] [ 1089.972267] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] [ 1089.972267] env[62740]: INFO nova.compute.manager [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Terminating instance [ 1089.973448] env[62740]: DEBUG oslo_concurrency.lockutils [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Acquired lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.973626] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1089.973869] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-88e14e5c-22d9-4a35-bede-40e2243775df {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.976262] env[62740]: DEBUG nova.compute.manager [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1089.976465] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1089.977205] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d37da49f-ea82-491d-a6c1-9993e2463112 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.983516] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1089.983747] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-33fa56e9-8c3a-4a88-b28a-c2871bd2d7f8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.985881] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1089.986071] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1089.987087] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-608ae673-da7d-4885-8be6-93989d0310da {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.992311] env[62740]: DEBUG oslo_vmware.api [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Waiting for the task: (returnval){ [ 1089.992311] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]523dcc40-e9a2-ea77-c737-e11428aff823" [ 1089.992311] env[62740]: _type = "Task" [ 1089.992311] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.004949] env[62740]: DEBUG oslo_vmware.api [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]523dcc40-e9a2-ea77-c737-e11428aff823, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.043916] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1090.043916] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Deleting contents of the VM from datastore datastore1 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1090.044109] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Deleting the datastore file [datastore1] 00085df9-ce61-4ccc-8ecf-16956109eb8f {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1090.044329] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-830d9129-3547-4257-8c61-83d1638756c7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.050744] env[62740]: DEBUG oslo_vmware.api [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Waiting for the task: (returnval){ [ 1090.050744] env[62740]: value = "task-640192" [ 1090.050744] env[62740]: _type = "Task" [ 1090.050744] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.060824] env[62740]: DEBUG oslo_vmware.api [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Task: {'id': task-640192, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.503130] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1090.503406] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Creating directory with path [datastore1] vmware_temp/841d92f6-8f78-4340-9547-4802566328ae/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1090.503643] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-04093470-40a1-4d56-a2a0-029be899dfdc {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.515881] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Created directory with path [datastore1] vmware_temp/841d92f6-8f78-4340-9547-4802566328ae/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1090.516102] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Fetch image to [datastore1] vmware_temp/841d92f6-8f78-4340-9547-4802566328ae/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1090.516279] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore1] vmware_temp/841d92f6-8f78-4340-9547-4802566328ae/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore1 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1090.517018] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a459c840-80ef-4d53-8f6c-55c2549ec9d2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.523527] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a974b78f-c174-4b3d-99e2-0a0ca7fff243 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.532128] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a65b6fd-f400-4468-98e8-6f3e1f429d59 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.564493] env[62740]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6158432-2ae2-4345-8fb0-669de90864bb {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.571555] env[62740]: DEBUG oslo_vmware.api [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Task: {'id': task-640192, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077325} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.572949] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1090.573159] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Deleted contents of the VM from datastore datastore1 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1090.573339] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1090.573519] env[62740]: INFO nova.compute.manager [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Took 0.60 seconds to destroy the instance on the hypervisor. 
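The CreateVM_Task, CopyVirtualDisk_Task and DeleteDatastoreFile_Task blocks above all pass through the same oslo.vmware task-polling loop: wait_for_task repeatedly reads the task's info property and either returns on success or raises a translated fault, which is how the InvalidArgument fault surfaced earlier as a VimFaultException. A condensed sketch of that loop, assuming a plain sleep where the real VMwareAPISession.wait_for_task() drives _poll_task() from a looping call:

```python
# Condensed sketch of oslo.vmware task polling; not the library's
# actual implementation, which runs _poll_task() on a timer.
import time

from oslo_vmware import vim_util
from oslo_vmware.exceptions import translate_fault

def wait_for_task(session, task_ref, interval=0.5):
    while True:
        # TaskInfo is the 'info' property of the Task managed object;
        # each read here corresponds to a "progress is N%" DEBUG line.
        info = session.invoke_api(vim_util, 'get_object_property',
                                  session.vim, task_ref, 'info')
        if info.state == 'success':
            return info
        if info.state == 'error':
            # e.g. "A specified parameter was not correct: fileType"
            # above, re-raised as a VimFaultException.
            raise translate_fault(info.error)
        time.sleep(interval)  # state is 'queued' or 'running'
```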
[ 1090.575286] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-87f57432-41bb-4b13-8ef7-36c85043abd2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.577177] env[62740]: DEBUG nova.compute.claims [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1090.577353] env[62740]: DEBUG oslo_concurrency.lockutils [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1090.577564] env[62740]: DEBUG oslo_concurrency.lockutils [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1090.579444] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Expecting reply to msg e43768b947e64341873782ac519ec7f2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1090.605113] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore1 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1090.616116] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e43768b947e64341873782ac519ec7f2 [ 1090.657912] env[62740]: DEBUG oslo_vmware.rw_handles [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/841d92f6-8f78-4340-9547-4802566328ae/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1090.719390] env[62740]: DEBUG oslo_vmware.rw_handles [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Completed reading data from the image iterator. 
{{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1090.719700] env[62740]: DEBUG oslo_vmware.rw_handles [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/841d92f6-8f78-4340-9547-4802566328ae/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1090.873841] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1378e49-4692-47f1-8b46-01111a4b53eb {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.881176] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ca4f484-61ee-4fdb-9262-de44f9d8239f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.910667] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-270a2792-29cb-4f15-9d81-fe7dadcef6e6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.917406] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b6e9162-86c7-46e0-bcf2-e1c71b8ec59e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.929946] env[62740]: DEBUG nova.compute.provider_tree [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1090.930441] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Expecting reply to msg 38beb0c469a143eab0af6e0e97869ccc in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1090.938058] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 38beb0c469a143eab0af6e0e97869ccc [ 1090.938952] env[62740]: DEBUG nova.scheduler.client.report [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1090.941213] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 
tempest-InstanceActionsV221TestJSON-1746537320-project-member] Expecting reply to msg 0a310c850e964a8f846ccd40841099a3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1090.952290] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0a310c850e964a8f846ccd40841099a3 [ 1090.952947] env[62740]: DEBUG oslo_concurrency.lockutils [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.375s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1090.953466] env[62740]: ERROR nova.compute.manager [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1090.953466] env[62740]: Faults: ['InvalidArgument'] [ 1090.953466] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Traceback (most recent call last): [ 1090.953466] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1090.953466] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] self.driver.spawn(context, instance, image_meta, [ 1090.953466] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1090.953466] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1090.953466] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1090.953466] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] self._fetch_image_if_missing(context, vi) [ 1090.953466] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1090.953466] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] image_cache(vi, tmp_image_ds_loc) [ 1090.953466] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1090.953844] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] vm_util.copy_virtual_disk( [ 1090.953844] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1090.953844] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] session._wait_for_task(vmdk_copy_task) [ 1090.953844] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1090.953844] env[62740]: ERROR nova.compute.manager 
[instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] return self.wait_for_task(task_ref) [ 1090.953844] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1090.953844] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] return evt.wait() [ 1090.953844] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1090.953844] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] result = hub.switch() [ 1090.953844] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1090.953844] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] return self.greenlet.switch() [ 1090.953844] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1090.953844] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] self.f(*self.args, **self.kw) [ 1090.954226] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1090.954226] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] raise exceptions.translate_fault(task_info.error) [ 1090.954226] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1090.954226] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Faults: ['InvalidArgument'] [ 1090.954226] env[62740]: ERROR nova.compute.manager [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] [ 1090.954226] env[62740]: DEBUG nova.compute.utils [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1090.955458] env[62740]: DEBUG nova.compute.manager [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Build of instance 00085df9-ce61-4ccc-8ecf-16956109eb8f was re-scheduled: A specified parameter was not correct: fileType [ 1090.955458] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1090.955839] env[62740]: DEBUG nova.compute.manager [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1090.956023] env[62740]: DEBUG nova.compute.manager [None 
req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1090.956213] env[62740]: DEBUG nova.compute.manager [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1090.956391] env[62740]: DEBUG nova.network.neutron [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1091.341030] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Expecting reply to msg 09d86b30c26b4492a03e824c02e02968 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1091.350446] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 09d86b30c26b4492a03e824c02e02968 [ 1091.351020] env[62740]: DEBUG nova.network.neutron [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.351495] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Expecting reply to msg 5c7be9e92a634b8196a3288e7836120d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1091.361714] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c7be9e92a634b8196a3288e7836120d [ 1091.362383] env[62740]: INFO nova.compute.manager [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Took 0.41 seconds to deallocate network for instance.
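[editor's note] The failure recorded above follows a fixed pattern: CopyVirtualDisk_Task fails on the vCenter side, the oslo.vmware task poller translates the task error into a VimFaultException ("A specified parameter was not correct: fileType", faults ['InvalidArgument']), and Nova's _do_build_and_run_instance catches it, aborts the resource claim, and re-schedules the build. A minimal sketch of the poll-and-raise step; class and field names below are simplified stand-ins, not the real oslo.vmware API:

class VimFaultException(Exception):
    # stand-in for oslo_vmware.exceptions.VimFaultException
    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list

def poll_task(task_info):
    # mirrors the behavior of oslo_vmware.api._poll_task seen in the
    # traceback above: success returns, error raises
    if task_info.state == 'error':
        # the real code calls exceptions.translate_fault(task_info.error)
        raise VimFaultException(
            ['InvalidArgument'],
            'A specified parameter was not correct: fileType')
    return getattr(task_info, 'result', None)

The exception unwinds through session._wait_for_task into the compute manager, which is why the same message appears both in the traceback and in the "was re-scheduled" record.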
[ 1091.364469] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Expecting reply to msg 56c49e5a7712450a8a33413c89f1eaa8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1091.404059] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 56c49e5a7712450a8a33413c89f1eaa8 [ 1091.407369] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Expecting reply to msg fef1b07b122841519640ad56c5a76bbe in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1091.437986] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fef1b07b122841519640ad56c5a76bbe [ 1091.464602] env[62740]: INFO nova.scheduler.client.report [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Deleted allocations for instance 00085df9-ce61-4ccc-8ecf-16956109eb8f [ 1091.471243] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Expecting reply to msg 7493125126044a91857e67cc83e8cd15 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1091.486397] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7493125126044a91857e67cc83e8cd15 [ 1091.487057] env[62740]: DEBUG oslo_concurrency.lockutils [None req-09317351-a109-4643-a772-41846a599304 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Lock "00085df9-ce61-4ccc-8ecf-16956109eb8f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 434.312s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.487549] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Expecting reply to msg 66fe25792c8846309332913cd6e1103b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1091.488352] env[62740]: DEBUG oslo_concurrency.lockutils [None req-0c4703f9-4bb7-46dd-9e31-43d161839b80 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Lock "00085df9-ce61-4ccc-8ecf-16956109eb8f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 237.028s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1091.488926] env[62740]: DEBUG oslo_concurrency.lockutils [None req-0c4703f9-4bb7-46dd-9e31-43d161839b80 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Acquiring lock "00085df9-ce61-4ccc-8ecf-16956109eb8f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1091.488926] env[62740]: DEBUG oslo_concurrency.lockutils [None req-0c4703f9-4bb7-46dd-9e31-43d161839b80 
tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Lock "00085df9-ce61-4ccc-8ecf-16956109eb8f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1091.488926] env[62740]: DEBUG oslo_concurrency.lockutils [None req-0c4703f9-4bb7-46dd-9e31-43d161839b80 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Lock "00085df9-ce61-4ccc-8ecf-16956109eb8f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.491015] env[62740]: INFO nova.compute.manager [None req-0c4703f9-4bb7-46dd-9e31-43d161839b80 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Terminating instance [ 1091.493154] env[62740]: DEBUG nova.compute.manager [None req-0c4703f9-4bb7-46dd-9e31-43d161839b80 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1091.493388] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-0c4703f9-4bb7-46dd-9e31-43d161839b80 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1091.493900] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fdd94655-2ef3-4491-8611-59ecd50c1071 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.503899] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f7390a9-2584-4b1a-9d06-15fae5a73ced {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.515035] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 66fe25792c8846309332913cd6e1103b [ 1091.515826] env[62740]: DEBUG nova.compute.manager [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Starting instance... 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1091.517292] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Expecting reply to msg 44e3ebbadcfa4a6c9433608db085f993 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1091.535775] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-0c4703f9-4bb7-46dd-9e31-43d161839b80 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 00085df9-ce61-4ccc-8ecf-16956109eb8f could not be found. [ 1091.535959] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-0c4703f9-4bb7-46dd-9e31-43d161839b80 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1091.536154] env[62740]: INFO nova.compute.manager [None req-0c4703f9-4bb7-46dd-9e31-43d161839b80 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1091.536402] env[62740]: DEBUG oslo.service.loopingcall [None req-0c4703f9-4bb7-46dd-9e31-43d161839b80 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1091.536634] env[62740]: DEBUG nova.compute.manager [-] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1091.536762] env[62740]: DEBUG nova.network.neutron [-] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1091.553838] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 88b9175ff65c4caf8382843dc81a940c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1091.554973] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 44e3ebbadcfa4a6c9433608db085f993 [ 1091.564700] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 88b9175ff65c4caf8382843dc81a940c [ 1091.565062] env[62740]: DEBUG nova.network.neutron [-] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.565451] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg cd4cd420321b46d28830758961afbb8e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1091.572177] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1091.572925] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1091.574308] env[62740]: INFO nova.compute.claims [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1091.576075] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Expecting reply to msg 2fce573b41c84dbc944eb8252c1070d5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1091.577804] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd4cd420321b46d28830758961afbb8e [ 1091.578251] env[62740]: INFO nova.compute.manager [-] [instance: 00085df9-ce61-4ccc-8ecf-16956109eb8f] Took 0.04 seconds to deallocate network for instance. 
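[editor's note] The interleaved records above show two request contexts sharing one compute host: req-0c4703f9 tears down the failed instance 00085df9 while req-a0c6e7bd claims resources for cf00af51 under the same "compute_resources" lock, which is what produces the paired acquire/release lines from lockutils.inner. A minimal sketch of that serialization using only oslo.concurrency; the claim body is a hypothetical placeholder, not Nova's actual ResourceTracker API:

from oslo_concurrency import lockutils

def instance_claim(tracker, instance):
    # one in-process named lock serializes claims and aborts, matching
    # the 'Lock "compute_resources" acquired/released' pairs in the log
    with lockutils.lock('compute_resources'):
        return tracker.claim(instance)  # hypothetical helper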
[ 1091.581533] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0c4703f9-4bb7-46dd-9e31-43d161839b80 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Expecting reply to msg 529df795e1f44f01be9d122317363959 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1091.611906] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2fce573b41c84dbc944eb8252c1070d5 [ 1091.612496] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 529df795e1f44f01be9d122317363959 [ 1091.614251] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Expecting reply to msg 16a30ea71064466f8351390e1e2409c2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1091.621470] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 16a30ea71064466f8351390e1e2409c2 [ 1091.634188] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0c4703f9-4bb7-46dd-9e31-43d161839b80 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Expecting reply to msg 46547c90a45c491c97dc8733ef0c2c95 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1091.680288] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 46547c90a45c491c97dc8733ef0c2c95 [ 1091.683403] env[62740]: DEBUG oslo_concurrency.lockutils [None req-0c4703f9-4bb7-46dd-9e31-43d161839b80 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Lock "00085df9-ce61-4ccc-8ecf-16956109eb8f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.195s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.683744] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0c4703f9-4bb7-46dd-9e31-43d161839b80 tempest-InstanceActionsV221TestJSON-1746537320 tempest-InstanceActionsV221TestJSON-1746537320-project-member] Expecting reply to msg cb21591eab5a4d199bf9effdb1c1fe35 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1091.699658] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cb21591eab5a4d199bf9effdb1c1fe35 [ 1091.835973] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aef0397b-dcd8-428f-a6f4-b47a29e1ed41 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.843295] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45970562-8e3b-42dc-ad91-f88ae4487879 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.872355] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a5a617e-886d-40e2-a2eb-e451f429e1ac {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.879605] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a452279b-7293-4b81-a4f8-8ae4c54363d7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1091.892784] env[62740]: DEBUG nova.compute.provider_tree [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1091.893300] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Expecting reply to msg 167a4c2c996d4b69b863653fed3ddc40 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1091.901535] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 167a4c2c996d4b69b863653fed3ddc40 [ 1091.902440] env[62740]: DEBUG nova.scheduler.client.report [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1091.904759] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Expecting reply to msg c6ff3173f253458b97a7c5c141a62cf6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1091.915797] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c6ff3173f253458b97a7c5c141a62cf6 [ 1091.916613] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.344s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.917017] env[62740]: DEBUG nova.compute.manager [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Start building networks asynchronously for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1091.918906] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Expecting reply to msg 8411170848da41529d665e150a4ae703 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1091.949073] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8411170848da41529d665e150a4ae703 [ 1091.952269] env[62740]: DEBUG nova.compute.utils [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1091.952882] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Expecting reply to msg 39be5e2206de4a68bf889c6c1d64c507 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1091.954054] env[62740]: DEBUG nova.compute.manager [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1091.954170] env[62740]: DEBUG nova.network.neutron [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1091.961895] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 39be5e2206de4a68bf889c6c1d64c507 [ 1091.962580] env[62740]: DEBUG nova.compute.manager [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1091.964295] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Expecting reply to msg 72574e5044884ccc90b5f9c220070d2e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1091.994333] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 72574e5044884ccc90b5f9c220070d2e [ 1091.996108] env[62740]: INFO nova.virt.block_device [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Booting with volume a8b46b50-b319-476c-a9dc-31e9571fd2b0 at /dev/sda [ 1092.018176] env[62740]: DEBUG nova.policy [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c4653ce3bae34688ad83a298c6891961', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '83ab31764f1a409284a3f3d77983c6d0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 1092.040535] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c208cbbd-192d-429f-b8be-3d07bfcc6d86 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.048976] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceeefc7e-803f-4ea1-9481-23b49658191f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.075327] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-be37be57-a5f6-4c4e-8647-b4dbe4a21a10 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.082623] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-862d1670-6e8e-4504-975b-5287974de0ef {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.109183] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72193ffc-e754-463e-b4ce-e2ad348662c7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.115375] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-360d4a63-d402-4cc2-98d6-808d0e8c6464 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.130172] env[62740]: DEBUG nova.virt.block_device [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Updating existing volume attachment record: 
c2393013-870d-48cd-9ce1-da0cefd1b996 {{(pid=62740) _volume_attach /opt/stack/nova/nova/virt/block_device.py:665}} [ 1092.252481] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Expecting reply to msg 32510dc9d1a1402aa09e40c82fb15d72 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1092.264195] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 32510dc9d1a1402aa09e40c82fb15d72 [ 1092.314998] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Expecting reply to msg 0d290e20a66546dab54809a2acc19eea in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1092.326960] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0d290e20a66546dab54809a2acc19eea [ 1092.329095] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Expecting reply to msg c6e0e12446484fca8177e84c1179e457 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1092.367064] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c6e0e12446484fca8177e84c1179e457 [ 1092.368377] env[62740]: DEBUG nova.compute.manager [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Start spawning the instance on the hypervisor. 
{{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1092.368905] env[62740]: DEBUG nova.virt.hardware [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1092.369126] env[62740]: DEBUG nova.virt.hardware [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1092.369285] env[62740]: DEBUG nova.virt.hardware [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1092.369503] env[62740]: DEBUG nova.virt.hardware [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1092.369683] env[62740]: DEBUG nova.virt.hardware [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1092.369840] env[62740]: DEBUG nova.virt.hardware [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1092.370058] env[62740]: DEBUG nova.virt.hardware [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1092.370222] env[62740]: DEBUG nova.virt.hardware [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1092.370389] env[62740]: DEBUG nova.virt.hardware [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 
tempest-ServerActionsV293TestJSON-1211890765-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1092.370553] env[62740]: DEBUG nova.virt.hardware [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1092.370746] env[62740]: DEBUG nova.virt.hardware [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1092.371866] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00613e46-5d5e-449b-8a7b-7efcbbf57fb2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.380779] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61ceb274-b02e-427f-9d4c-3c47c0ee6ea1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.454376] env[62740]: DEBUG nova.network.neutron [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Successfully created port: e5a583ec-2110-4864-aff5-95258c10a46d {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1093.264106] env[62740]: DEBUG nova.network.neutron [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Successfully updated port: e5a583ec-2110-4864-aff5-95258c10a46d {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1093.264707] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Expecting reply to msg a5426c898de44fc58f1a0df14853ed19 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1093.276953] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a5426c898de44fc58f1a0df14853ed19 [ 1093.277766] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Acquiring lock "refresh_cache-cf00af51-2b31-4b99-a692-8b0851dd74b8" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1093.278560] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Acquired lock "refresh_cache-cf00af51-2b31-4b99-a692-8b0851dd74b8" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.278560] env[62740]: DEBUG nova.network.neutron [None 
req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1093.278560] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Expecting reply to msg 602e83a7f80a4bec9f3a004147bd73d2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1093.285671] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 602e83a7f80a4bec9f3a004147bd73d2 [ 1093.322456] env[62740]: DEBUG nova.network.neutron [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1093.507448] env[62740]: DEBUG nova.compute.manager [req-74eab431-cbfc-4695-8233-71a3a2523cdf req-ed2032a9-41dd-4faa-a674-075d17de01ae service nova] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Received event network-vif-plugged-e5a583ec-2110-4864-aff5-95258c10a46d {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1093.507670] env[62740]: DEBUG oslo_concurrency.lockutils [req-74eab431-cbfc-4695-8233-71a3a2523cdf req-ed2032a9-41dd-4faa-a674-075d17de01ae service nova] Acquiring lock "cf00af51-2b31-4b99-a692-8b0851dd74b8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1093.507934] env[62740]: DEBUG oslo_concurrency.lockutils [req-74eab431-cbfc-4695-8233-71a3a2523cdf req-ed2032a9-41dd-4faa-a674-075d17de01ae service nova] Lock "cf00af51-2b31-4b99-a692-8b0851dd74b8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.508152] env[62740]: DEBUG oslo_concurrency.lockutils [req-74eab431-cbfc-4695-8233-71a3a2523cdf req-ed2032a9-41dd-4faa-a674-075d17de01ae service nova] Lock "cf00af51-2b31-4b99-a692-8b0851dd74b8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.508360] env[62740]: DEBUG nova.compute.manager [req-74eab431-cbfc-4695-8233-71a3a2523cdf req-ed2032a9-41dd-4faa-a674-075d17de01ae service nova] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] No waiting events found dispatching network-vif-plugged-e5a583ec-2110-4864-aff5-95258c10a46d {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1093.508547] env[62740]: WARNING nova.compute.manager [req-74eab431-cbfc-4695-8233-71a3a2523cdf req-ed2032a9-41dd-4faa-a674-075d17de01ae service nova] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Received unexpected event network-vif-plugged-e5a583ec-2110-4864-aff5-95258c10a46d for instance with vm_state building and task_state spawning. 
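[editor's note] The "Received unexpected event network-vif-plugged-..." warning above is the normal outcome when Neutron delivers a VIF event before the virt driver has registered a waiter for it. A schematic sketch of that dispatch; the dict and function below are simplified stand-ins for nova.compute.manager.InstanceEvents, not the real API:

_waiters = {}  # (instance_uuid, event_name) -> callback

def external_instance_event(instance_uuid, event_name):
    # pop_instance_event: match the incoming event against a waiter
    cb = _waiters.pop((instance_uuid, event_name), None)
    if cb is None:
        # 'No waiting events found dispatching ...' -> warn and drop,
        # exactly as in the WARNING record above
        print('Received unexpected event %s for instance %s'
              % (event_name, instance_uuid))
        return
    cb()

Here the event arrives while the instance is still in vm_state building / task_state spawning, so no waiter exists yet and the event is dropped after the warning; the follow-up network-changed event then triggers the cache refresh seen below.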
[ 1093.508712] env[62740]: DEBUG nova.compute.manager [req-74eab431-cbfc-4695-8233-71a3a2523cdf req-ed2032a9-41dd-4faa-a674-075d17de01ae service nova] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Received event network-changed-e5a583ec-2110-4864-aff5-95258c10a46d {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1093.508868] env[62740]: DEBUG nova.compute.manager [req-74eab431-cbfc-4695-8233-71a3a2523cdf req-ed2032a9-41dd-4faa-a674-075d17de01ae service nova] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Refreshing instance network info cache due to event network-changed-e5a583ec-2110-4864-aff5-95258c10a46d. {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1093.509049] env[62740]: DEBUG oslo_concurrency.lockutils [req-74eab431-cbfc-4695-8233-71a3a2523cdf req-ed2032a9-41dd-4faa-a674-075d17de01ae service nova] Acquiring lock "refresh_cache-cf00af51-2b31-4b99-a692-8b0851dd74b8" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1093.585798] env[62740]: DEBUG nova.network.neutron [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Updating instance_info_cache with network_info: [{"id": "e5a583ec-2110-4864-aff5-95258c10a46d", "address": "fa:16:3e:a4:85:34", "network": {"id": "482916b0-8b1f-40cb-b4c7-ac275b827786", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-308578270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83ab31764f1a409284a3f3d77983c6d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c405e9f-a6c8-4308-acac-071654efe18e", "external-id": "nsx-vlan-transportzone-851", "segmentation_id": 851, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5a583ec-21", "ovs_interfaceid": "e5a583ec-2110-4864-aff5-95258c10a46d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1093.586338] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Expecting reply to msg c98467e560d240d1b6d02742ec24ef88 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1093.599956] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c98467e560d240d1b6d02742ec24ef88 [ 1093.600207] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Releasing lock "refresh_cache-cf00af51-2b31-4b99-a692-8b0851dd74b8" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1093.600494] env[62740]: DEBUG 
nova.compute.manager [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Instance network_info: |[{"id": "e5a583ec-2110-4864-aff5-95258c10a46d", "address": "fa:16:3e:a4:85:34", "network": {"id": "482916b0-8b1f-40cb-b4c7-ac275b827786", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-308578270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83ab31764f1a409284a3f3d77983c6d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c405e9f-a6c8-4308-acac-071654efe18e", "external-id": "nsx-vlan-transportzone-851", "segmentation_id": 851, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5a583ec-21", "ovs_interfaceid": "e5a583ec-2110-4864-aff5-95258c10a46d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1093.600831] env[62740]: DEBUG oslo_concurrency.lockutils [req-74eab431-cbfc-4695-8233-71a3a2523cdf req-ed2032a9-41dd-4faa-a674-075d17de01ae service nova] Acquired lock "refresh_cache-cf00af51-2b31-4b99-a692-8b0851dd74b8" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.601043] env[62740]: DEBUG nova.network.neutron [req-74eab431-cbfc-4695-8233-71a3a2523cdf req-ed2032a9-41dd-4faa-a674-075d17de01ae service nova] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Refreshing network info cache for port e5a583ec-2110-4864-aff5-95258c10a46d {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1093.601449] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-74eab431-cbfc-4695-8233-71a3a2523cdf req-ed2032a9-41dd-4faa-a674-075d17de01ae service nova] Expecting reply to msg c80705b66dc14fe298c1b0de4d1c23a7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1093.602287] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a4:85:34', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3c405e9f-a6c8-4308-acac-071654efe18e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e5a583ec-2110-4864-aff5-95258c10a46d', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1093.609998] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Creating folder: Project (83ab31764f1a409284a3f3d77983c6d0). Parent ref: group-v156037. 
{{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1093.610693] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c80705b66dc14fe298c1b0de4d1c23a7 [ 1093.611056] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8a0f842f-d91d-4a21-b57d-cbba94fa1217 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.626390] env[62740]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1093.626560] env[62740]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62740) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1093.626932] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Folder already exists: Project (83ab31764f1a409284a3f3d77983c6d0). Parent ref: group-v156037. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1093.627142] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Creating folder: Instances. Parent ref: group-v156100. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1093.627373] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d4f9930c-c97f-4661-a142-9d6c7cacf897 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.635875] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Created folder: Instances in parent group-v156100. [ 1093.636391] env[62740]: DEBUG oslo.service.loopingcall [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1093.636391] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1093.636513] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4da96266-cb1e-41b7-92fc-74c8fbd2fa0d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.654404] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1093.654404] env[62740]: value = "task-640195" [ 1093.654404] env[62740]: _type = "Task" [ 1093.654404] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.661483] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640195, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.957504] env[62740]: DEBUG nova.network.neutron [req-74eab431-cbfc-4695-8233-71a3a2523cdf req-ed2032a9-41dd-4faa-a674-075d17de01ae service nova] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Updated VIF entry in instance network info cache for port e5a583ec-2110-4864-aff5-95258c10a46d. {{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1093.957889] env[62740]: DEBUG nova.network.neutron [req-74eab431-cbfc-4695-8233-71a3a2523cdf req-ed2032a9-41dd-4faa-a674-075d17de01ae service nova] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Updating instance_info_cache with network_info: [{"id": "e5a583ec-2110-4864-aff5-95258c10a46d", "address": "fa:16:3e:a4:85:34", "network": {"id": "482916b0-8b1f-40cb-b4c7-ac275b827786", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-308578270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83ab31764f1a409284a3f3d77983c6d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c405e9f-a6c8-4308-acac-071654efe18e", "external-id": "nsx-vlan-transportzone-851", "segmentation_id": 851, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5a583ec-21", "ovs_interfaceid": "e5a583ec-2110-4864-aff5-95258c10a46d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1093.958488] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-74eab431-cbfc-4695-8233-71a3a2523cdf req-ed2032a9-41dd-4faa-a674-075d17de01ae service nova] Expecting reply to msg 61a190229dc146a682e5c4a6df229b57 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1093.967542] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 61a190229dc146a682e5c4a6df229b57 [ 1093.968214] env[62740]: DEBUG oslo_concurrency.lockutils [req-74eab431-cbfc-4695-8233-71a3a2523cdf req-ed2032a9-41dd-4faa-a674-075d17de01ae service nova] Releasing lock "refresh_cache-cf00af51-2b31-4b99-a692-8b0851dd74b8" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1094.164230] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640195, 'name': CreateVM_Task, 'duration_secs': 0.302784} completed successfully. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.164414] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1094.165055] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'disk_bus': None, 'boot_index': 0, 'attachment_id': 'c2393013-870d-48cd-9ce1-da0cefd1b996', 'device_type': None, 'delete_on_termination': True, 'guest_format': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-156106', 'volume_id': 'a8b46b50-b319-476c-a9dc-31e9571fd2b0', 'name': 'volume-a8b46b50-b319-476c-a9dc-31e9571fd2b0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'cf00af51-2b31-4b99-a692-8b0851dd74b8', 'attached_at': '', 'detached_at': '', 'volume_id': 'a8b46b50-b319-476c-a9dc-31e9571fd2b0', 'serial': 'a8b46b50-b319-476c-a9dc-31e9571fd2b0'}, 'mount_device': '/dev/sda', 'volume_type': None}], 'swap': None} {{(pid=62740) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1094.165298] env[62740]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Root volume attach. Driver type: vmdk {{(pid=62740) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1094.166094] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46b68572-97ed-4218-aaf4-925abae160f9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.175497] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61bc3f11-bb0f-454d-b551-82799d6fb99a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.181181] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cefe805d-ee5a-4d91-bbcf-57c28e5cc5bb {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.188692] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-fd72ab3c-6e51-4cac-a10a-d3e27570bcd6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.196324] env[62740]: DEBUG oslo_vmware.api [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Waiting for the task: (returnval){ [ 1094.196324] env[62740]: value = "task-640196" [ 1094.196324] env[62740]: _type = "Task" [ 1094.196324] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.203627] env[62740]: DEBUG oslo_vmware.api [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Task: {'id': task-640196, 'name': RelocateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.709658] env[62740]: DEBUG oslo_vmware.api [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Task: {'id': task-640196, 'name': RelocateVM_Task} progress is 42%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.210146] env[62740]: DEBUG oslo_vmware.api [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Task: {'id': task-640196, 'name': RelocateVM_Task} progress is 54%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.709638] env[62740]: DEBUG oslo_vmware.api [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Task: {'id': task-640196, 'name': RelocateVM_Task} progress is 67%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.210863] env[62740]: DEBUG oslo_vmware.api [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Task: {'id': task-640196, 'name': RelocateVM_Task} progress is 82%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.708233] env[62740]: DEBUG oslo_vmware.api [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Task: {'id': task-640196, 'name': RelocateVM_Task} progress is 96%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.208276] env[62740]: DEBUG oslo_vmware.api [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Task: {'id': task-640196, 'name': RelocateVM_Task} progress is 98%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.710402] env[62740]: DEBUG oslo_vmware.api [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Task: {'id': task-640196, 'name': RelocateVM_Task, 'duration_secs': 3.25616} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.710817] env[62740]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Volume attach. 
Driver type: vmdk {{(pid=62740) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1097.711058] env[62740]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-156106', 'volume_id': 'a8b46b50-b319-476c-a9dc-31e9571fd2b0', 'name': 'volume-a8b46b50-b319-476c-a9dc-31e9571fd2b0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'cf00af51-2b31-4b99-a692-8b0851dd74b8', 'attached_at': '', 'detached_at': '', 'volume_id': 'a8b46b50-b319-476c-a9dc-31e9571fd2b0', 'serial': 'a8b46b50-b319-476c-a9dc-31e9571fd2b0'} {{(pid=62740) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1097.711815] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d45da8dc-250a-453d-beaa-40a918fb033d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.728166] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fef1da3f-b1f1-4759-acde-5bbbd6bd0534 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.750132] env[62740]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] volume-a8b46b50-b319-476c-a9dc-31e9571fd2b0/volume-a8b46b50-b319-476c-a9dc-31e9571fd2b0.vmdk or device None with type thin {{(pid=62740) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1097.750400] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f34e0993-2403-4bf9-a11a-e91df003ba0a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.768977] env[62740]: DEBUG oslo_vmware.api [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Waiting for the task: (returnval){ [ 1097.768977] env[62740]: value = "task-640197" [ 1097.768977] env[62740]: _type = "Task" [ 1097.768977] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.776447] env[62740]: DEBUG oslo_vmware.api [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Task: {'id': task-640197, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.109923] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ee045319-c6bf-48ea-b314-d3958f7c5617 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Expecting reply to msg ff910338a66442c9b34656d8ed223f12 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1098.119366] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ff910338a66442c9b34656d8ed223f12 [ 1098.120017] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ee045319-c6bf-48ea-b314-d3958f7c5617 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Acquiring lock "158406db-7196-4826-aefa-20a58daa186b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1098.279492] env[62740]: DEBUG oslo_vmware.api [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Task: {'id': task-640197, 'name': ReconfigVM_Task, 'duration_secs': 0.294506} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.279797] env[62740]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Reconfigured VM instance instance-00000037 to attach disk [datastore1] volume-a8b46b50-b319-476c-a9dc-31e9571fd2b0/volume-a8b46b50-b319-476c-a9dc-31e9571fd2b0.vmdk or device None with type thin {{(pid=62740) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1098.284862] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b3ecb8a0-75b7-4f1d-85ac-a708da4bd2ba {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.299489] env[62740]: DEBUG oslo_vmware.api [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Waiting for the task: (returnval){ [ 1098.299489] env[62740]: value = "task-640198" [ 1098.299489] env[62740]: _type = "Task" [ 1098.299489] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.308385] env[62740]: DEBUG oslo_vmware.api [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Task: {'id': task-640198, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.809895] env[62740]: DEBUG oslo_vmware.api [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Task: {'id': task-640198, 'name': ReconfigVM_Task, 'duration_secs': 0.11936} completed successfully. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.810237] env[62740]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-156106', 'volume_id': 'a8b46b50-b319-476c-a9dc-31e9571fd2b0', 'name': 'volume-a8b46b50-b319-476c-a9dc-31e9571fd2b0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'cf00af51-2b31-4b99-a692-8b0851dd74b8', 'attached_at': '', 'detached_at': '', 'volume_id': 'a8b46b50-b319-476c-a9dc-31e9571fd2b0', 'serial': 'a8b46b50-b319-476c-a9dc-31e9571fd2b0'} {{(pid=62740) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1098.810797] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-87d87215-24da-4987-a4ac-545f7a0422c9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.816987] env[62740]: DEBUG oslo_vmware.api [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Waiting for the task: (returnval){ [ 1098.816987] env[62740]: value = "task-640199" [ 1098.816987] env[62740]: _type = "Task" [ 1098.816987] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.824210] env[62740]: DEBUG oslo_vmware.api [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Task: {'id': task-640199, 'name': Rename_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.327719] env[62740]: DEBUG oslo_vmware.api [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Task: {'id': task-640199, 'name': Rename_Task, 'duration_secs': 0.132356} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.327977] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Powering on the VM {{(pid=62740) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1099.328260] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7a5dfa8d-8bb9-470d-8868-772c51e2b80e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.334873] env[62740]: DEBUG oslo_vmware.api [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Waiting for the task: (returnval){ [ 1099.334873] env[62740]: value = "task-640200" [ 1099.334873] env[62740]: _type = "Task" [ 1099.334873] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.342385] env[62740]: DEBUG oslo_vmware.api [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Task: {'id': task-640200, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.844844] env[62740]: DEBUG oslo_vmware.api [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Task: {'id': task-640200, 'name': PowerOnVM_Task, 'duration_secs': 0.464695} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.845147] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Powered on the VM {{(pid=62740) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1099.845352] env[62740]: INFO nova.compute.manager [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Took 7.48 seconds to spawn the instance on the hypervisor. [ 1099.845600] env[62740]: DEBUG nova.compute.manager [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Checking state {{(pid=62740) _get_power_state /opt/stack/nova/nova/compute/manager.py:1782}} [ 1099.846393] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca2b78bd-4961-435b-8dc2-c708effbcc91 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.854049] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Expecting reply to msg 878c36ded8904233bd36d0125e6b6316 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1099.888242] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 878c36ded8904233bd36d0125e6b6316 [ 1099.898702] env[62740]: INFO nova.compute.manager [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Took 8.34 seconds to build instance. 
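The records above trace one spawn end to end: CreateVM_Task, a RelocateVM_Task that moves the Cinder root volume under the new VM, two ReconfigVM_Task calls that attach the vmdk and update its backing metadata, a Rename_Task, and a final PowerOnVM_Task, each driven by the same wait_for_task/_poll_task loop in oslo.vmware. As a rough standalone sketch of that polling loop (the real code wraps it in an oslo_service looping call; wait_for_task, get_task_info and TaskFailed here are illustrative names, with get_task_info standing in for reading the TaskInfo property off the Task managed object):

import time

class TaskFailed(Exception):
    pass

def wait_for_task(get_task_info, poll_interval=0.5):
    # get_task_info: any callable returning an object with .state,
    # .progress and .error, mirroring the vSphere TaskInfo object.
    while True:
        info = get_task_info()
        if info.state == 'success':
            return info                        # "... completed successfully."
        if info.state == 'error':
            raise TaskFailed(info.error)
        # matches the "progress is 42%" lines emitted at api.py:434
        print('progress is %s%%' % (info.progress or 0))
        time.sleep(poll_interval)

The attach itself ("Reconfiguring VM instance ... to attach disk ... with type thin") is a ReconfigVM_Task whose spec is built through the suds type factory that oslo.vmware exposes on the session's vim client. A simplified sketch of that spec construction, using the vSphere SDK type names but not nova's exact helper signatures (attach_vmdk_spec is an illustrative name):

def attach_vmdk_spec(client_factory, vmdk_path, controller_key, unit_number):
    backing = client_factory.create('ns0:VirtualDiskFlatVer2BackingInfo')
    backing.fileName = vmdk_path       # e.g. '[datastore1] volume-.../...vmdk'
    backing.diskMode = 'persistent'
    backing.thinProvisioned = True     # the "with type thin" in the log

    disk = client_factory.create('ns0:VirtualDisk')
    disk.backing = backing
    disk.controllerKey = controller_key
    disk.unitNumber = unit_number
    disk.key = -100                    # negative key: vCenter assigns the real one

    device_change = client_factory.create('ns0:VirtualDeviceConfigSpec')
    device_change.operation = 'add'
    device_change.device = disk

    config_spec = client_factory.create('ns0:VirtualMachineConfigSpec')
    config_spec.deviceChange = [device_change]
    return config_spec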
[ 1099.898702] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Expecting reply to msg abbdd7ef07814ffd8d759dcc68d40c3e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1099.910808] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg abbdd7ef07814ffd8d759dcc68d40c3e [ 1099.911622] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a0c6e7bd-a6e1-422b-8a37-55da34de9742 tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Lock "cf00af51-2b31-4b99-a692-8b0851dd74b8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 191.426s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1099.912061] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 6884026e76634d3b9e872abd9e19fed8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1099.923258] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6884026e76634d3b9e872abd9e19fed8 [ 1099.923796] env[62740]: DEBUG nova.compute.manager [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1099.925710] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg da1a8554080a495bb9426d79a9b1a93a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1099.967036] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg da1a8554080a495bb9426d79a9b1a93a [ 1100.000278] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1100.000278] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1100.000796] env[62740]: INFO nova.compute.claims [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1100.002458] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2ad257a5-c2c6-4996-806b-f64d34790904 
tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 519318b048fc4b9487db2c75f5aed881 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1100.045117] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 519318b048fc4b9487db2c75f5aed881 [ 1100.046937] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 141ecba387c44e4d8d585033102518cf in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1100.056939] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 141ecba387c44e4d8d585033102518cf [ 1100.304123] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85a79a5f-271c-4b90-8395-40bd52139a46 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.313072] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f58c8a16-92d4-4681-ae0b-66604966be61 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.345522] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c6db1b0-0d99-4a82-a994-b33cd7ba8919 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.353560] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50140d3d-1756-4102-b873-4076eb5d81eb {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.367109] env[62740]: DEBUG nova.compute.provider_tree [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1100.367607] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg b62140042ed54fc5a69adc7c8b840ceb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1100.378237] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b62140042ed54fc5a69adc7c8b840ceb [ 1100.379212] env[62740]: DEBUG nova.scheduler.client.report [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1100.382514] env[62740]: INFO 
oslo_messaging._drivers.amqpdriver [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg d5ad2477ef4844d3b050dbe3b037e0b6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1100.393737] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d5ad2477ef4844d3b050dbe3b037e0b6 [ 1100.394395] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.395s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1100.394849] env[62740]: DEBUG nova.compute.manager [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Start building networks asynchronously for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1100.396617] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 01acbb25daa04e6ebe002d3e23d62d9a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1100.444543] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 01acbb25daa04e6ebe002d3e23d62d9a [ 1100.446022] env[62740]: DEBUG nova.compute.utils [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1100.446706] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg f92d0cd626c7413c962b4353a5a42440 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1100.448400] env[62740]: DEBUG nova.compute.manager [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1100.448400] env[62740]: DEBUG nova.network.neutron [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1100.464536] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f92d0cd626c7413c962b4353a5a42440 [ 1100.465145] env[62740]: DEBUG nova.compute.manager [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1100.467828] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg fd9470155b6940899471f083d1514cca in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1100.501633] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fd9470155b6940899471f083d1514cca [ 1100.504497] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 11f7684ac255448aa5e7eb31af6d6949 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1100.544090] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 11f7684ac255448aa5e7eb31af6d6949 [ 1100.544090] env[62740]: DEBUG nova.compute.manager [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Start spawning the instance on the hypervisor. {{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1100.562253] env[62740]: DEBUG nova.policy [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aee525677ce346b59af803377086c7b9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '511c682375de4d5294723b0d656190d2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 1100.572518] env[62740]: DEBUG nova.virt.hardware [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1100.572756] env[62740]: DEBUG nova.virt.hardware [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1100.572911] env[62740]: 
DEBUG nova.virt.hardware [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1100.573110] env[62740]: DEBUG nova.virt.hardware [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1100.573263] env[62740]: DEBUG nova.virt.hardware [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1100.573411] env[62740]: DEBUG nova.virt.hardware [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1100.573623] env[62740]: DEBUG nova.virt.hardware [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1100.573782] env[62740]: DEBUG nova.virt.hardware [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1100.573949] env[62740]: DEBUG nova.virt.hardware [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1100.574126] env[62740]: DEBUG nova.virt.hardware [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1100.574327] env[62740]: DEBUG nova.virt.hardware [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1100.575194] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d6414c-84c3-4665-ab80-9f64673dbe88 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.583791] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf6dc391-6351-4b09-9519-51d5e4a2ae0a {{(pid=62740) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.068548] env[62740]: DEBUG nova.network.neutron [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Successfully created port: 258d5ba9-8361-403d-a5f2-5ee0331408bb {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1102.579724] env[62740]: DEBUG nova.compute.manager [req-cc802719-4f6c-484a-a356-80d5b5b8e5d4 req-8f176319-edc8-45c7-9bd9-4c03445d3732 service nova] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Received event network-vif-plugged-258d5ba9-8361-403d-a5f2-5ee0331408bb {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1102.580050] env[62740]: DEBUG oslo_concurrency.lockutils [req-cc802719-4f6c-484a-a356-80d5b5b8e5d4 req-8f176319-edc8-45c7-9bd9-4c03445d3732 service nova] Acquiring lock "6ca702af-1a5c-40bb-b6c7-2f55ca308c02-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1102.580162] env[62740]: DEBUG oslo_concurrency.lockutils [req-cc802719-4f6c-484a-a356-80d5b5b8e5d4 req-8f176319-edc8-45c7-9bd9-4c03445d3732 service nova] Lock "6ca702af-1a5c-40bb-b6c7-2f55ca308c02-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1102.580330] env[62740]: DEBUG oslo_concurrency.lockutils [req-cc802719-4f6c-484a-a356-80d5b5b8e5d4 req-8f176319-edc8-45c7-9bd9-4c03445d3732 service nova] Lock "6ca702af-1a5c-40bb-b6c7-2f55ca308c02-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1102.580565] env[62740]: DEBUG nova.compute.manager [req-cc802719-4f6c-484a-a356-80d5b5b8e5d4 req-8f176319-edc8-45c7-9bd9-4c03445d3732 service nova] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] No waiting events found dispatching network-vif-plugged-258d5ba9-8361-403d-a5f2-5ee0331408bb {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1102.580663] env[62740]: WARNING nova.compute.manager [req-cc802719-4f6c-484a-a356-80d5b5b8e5d4 req-8f176319-edc8-45c7-9bd9-4c03445d3732 service nova] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Received unexpected event network-vif-plugged-258d5ba9-8361-403d-a5f2-5ee0331408bb for instance with vm_state building and task_state spawning. 
[ 1102.658464] env[62740]: DEBUG nova.network.neutron [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Successfully updated port: 258d5ba9-8361-403d-a5f2-5ee0331408bb {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1102.658958] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 5fe198a42ecf421a8f8652b1f2361307 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1102.673668] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5fe198a42ecf421a8f8652b1f2361307 [ 1102.674476] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Acquiring lock "refresh_cache-6ca702af-1a5c-40bb-b6c7-2f55ca308c02" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1102.674619] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Acquired lock "refresh_cache-6ca702af-1a5c-40bb-b6c7-2f55ca308c02" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1102.674771] env[62740]: DEBUG nova.network.neutron [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1102.675322] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg dae16f4d635f4998acac4750a47ff7a5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1102.687258] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dae16f4d635f4998acac4750a47ff7a5 [ 1102.739925] env[62740]: DEBUG nova.network.neutron [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1103.190559] env[62740]: DEBUG nova.network.neutron [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Updating instance_info_cache with network_info: [{"id": "258d5ba9-8361-403d-a5f2-5ee0331408bb", "address": "fa:16:3e:dc:c1:ce", "network": {"id": "70fb46bd-81f8-49d4-bb8f-cb2fb8b6944e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2105504489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "511c682375de4d5294723b0d656190d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77ccbd87-ecfd-4b2d-a1ea-29774addcef6", "external-id": "nsx-vlan-transportzone-385", "segmentation_id": 385, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap258d5ba9-83", "ovs_interfaceid": "258d5ba9-8361-403d-a5f2-5ee0331408bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1103.191096] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg af021cb0917f498b8ab5f6fe0c1840f1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1103.204942] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af021cb0917f498b8ab5f6fe0c1840f1 [ 1103.205589] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Releasing lock "refresh_cache-6ca702af-1a5c-40bb-b6c7-2f55ca308c02" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1103.205881] env[62740]: DEBUG nova.compute.manager [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Instance network_info: |[{"id": "258d5ba9-8361-403d-a5f2-5ee0331408bb", "address": "fa:16:3e:dc:c1:ce", "network": {"id": "70fb46bd-81f8-49d4-bb8f-cb2fb8b6944e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2105504489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "511c682375de4d5294723b0d656190d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "77ccbd87-ecfd-4b2d-a1ea-29774addcef6", "external-id": "nsx-vlan-transportzone-385", "segmentation_id": 385, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap258d5ba9-83", "ovs_interfaceid": "258d5ba9-8361-403d-a5f2-5ee0331408bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1103.206324] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dc:c1:ce', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '77ccbd87-ecfd-4b2d-a1ea-29774addcef6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '258d5ba9-8361-403d-a5f2-5ee0331408bb', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1103.213763] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Creating folder: Project (511c682375de4d5294723b0d656190d2). Parent ref: group-v156037. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1103.214309] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6d9757cf-3a66-4817-bdad-548415a55864 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.225741] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Created folder: Project (511c682375de4d5294723b0d656190d2) in parent group-v156037. [ 1103.225925] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Creating folder: Instances. Parent ref: group-v156128. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1103.226156] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c39a168c-51d0-4e0c-bced-2194597b6589 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.234735] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Created folder: Instances in parent group-v156128. [ 1103.234956] env[62740]: DEBUG oslo.service.loopingcall [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1103.235147] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1103.235338] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-49ef919b-7751-4ac3-ba7f-5829cf577887 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.254231] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1103.254231] env[62740]: value = "task-640203" [ 1103.254231] env[62740]: _type = "Task" [ 1103.254231] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.261692] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640203, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.763619] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640203, 'name': CreateVM_Task, 'duration_secs': 0.306305} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.763894] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1103.764444] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1103.764613] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1103.764936] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1103.765199] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9decbcdc-b2e8-486f-8ab8-81545205a390 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.769397] env[62740]: DEBUG oslo_vmware.api [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Waiting for the task: (returnval){ [ 1103.769397] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]526227ab-f8c3-8876-8133-35c13b947373" [ 1103.769397] env[62740]: _type = "Task" [ 1103.769397] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.776539] env[62740]: DEBUG oslo_vmware.api [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]526227ab-f8c3-8876-8133-35c13b947373, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.280335] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1104.280603] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1104.280836] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1104.610192] env[62740]: DEBUG nova.compute.manager [req-18f33154-88b8-4f45-8ab4-c244f102f6bc req-bfdad95a-0014-40d7-af34-e6987c93d711 service nova] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Received event network-changed-e5a583ec-2110-4864-aff5-95258c10a46d {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1104.610301] env[62740]: DEBUG nova.compute.manager [req-18f33154-88b8-4f45-8ab4-c244f102f6bc req-bfdad95a-0014-40d7-af34-e6987c93d711 service nova] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Refreshing instance network info cache due to event network-changed-e5a583ec-2110-4864-aff5-95258c10a46d. 
{{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1104.610444] env[62740]: DEBUG oslo_concurrency.lockutils [req-18f33154-88b8-4f45-8ab4-c244f102f6bc req-bfdad95a-0014-40d7-af34-e6987c93d711 service nova] Acquiring lock "refresh_cache-cf00af51-2b31-4b99-a692-8b0851dd74b8" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1104.610642] env[62740]: DEBUG oslo_concurrency.lockutils [req-18f33154-88b8-4f45-8ab4-c244f102f6bc req-bfdad95a-0014-40d7-af34-e6987c93d711 service nova] Acquired lock "refresh_cache-cf00af51-2b31-4b99-a692-8b0851dd74b8" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1104.610854] env[62740]: DEBUG nova.network.neutron [req-18f33154-88b8-4f45-8ab4-c244f102f6bc req-bfdad95a-0014-40d7-af34-e6987c93d711 service nova] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Refreshing network info cache for port e5a583ec-2110-4864-aff5-95258c10a46d {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1104.611297] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-18f33154-88b8-4f45-8ab4-c244f102f6bc req-bfdad95a-0014-40d7-af34-e6987c93d711 service nova] Expecting reply to msg b186a4835f7545b0a3bbec20d7cb05b4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1104.619110] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b186a4835f7545b0a3bbec20d7cb05b4 [ 1104.973442] env[62740]: DEBUG nova.network.neutron [req-18f33154-88b8-4f45-8ab4-c244f102f6bc req-bfdad95a-0014-40d7-af34-e6987c93d711 service nova] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Updated VIF entry in instance network info cache for port e5a583ec-2110-4864-aff5-95258c10a46d. 
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1104.973805] env[62740]: DEBUG nova.network.neutron [req-18f33154-88b8-4f45-8ab4-c244f102f6bc req-bfdad95a-0014-40d7-af34-e6987c93d711 service nova] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Updating instance_info_cache with network_info: [{"id": "e5a583ec-2110-4864-aff5-95258c10a46d", "address": "fa:16:3e:a4:85:34", "network": {"id": "482916b0-8b1f-40cb-b4c7-ac275b827786", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-308578270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.240", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83ab31764f1a409284a3f3d77983c6d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c405e9f-a6c8-4308-acac-071654efe18e", "external-id": "nsx-vlan-transportzone-851", "segmentation_id": 851, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5a583ec-21", "ovs_interfaceid": "e5a583ec-2110-4864-aff5-95258c10a46d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1104.974341] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-18f33154-88b8-4f45-8ab4-c244f102f6bc req-bfdad95a-0014-40d7-af34-e6987c93d711 service nova] Expecting reply to msg 1c64d9ea12ae44108f04a84507e9d5b9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1104.989237] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1c64d9ea12ae44108f04a84507e9d5b9 [ 1104.989907] env[62740]: DEBUG oslo_concurrency.lockutils [req-18f33154-88b8-4f45-8ab4-c244f102f6bc req-bfdad95a-0014-40d7-af34-e6987c93d711 service nova] Releasing lock "refresh_cache-cf00af51-2b31-4b99-a692-8b0851dd74b8" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1104.990165] env[62740]: DEBUG nova.compute.manager [req-18f33154-88b8-4f45-8ab4-c244f102f6bc req-bfdad95a-0014-40d7-af34-e6987c93d711 service nova] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Received event network-changed-258d5ba9-8361-403d-a5f2-5ee0331408bb {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1104.990340] env[62740]: DEBUG nova.compute.manager [req-18f33154-88b8-4f45-8ab4-c244f102f6bc req-bfdad95a-0014-40d7-af34-e6987c93d711 service nova] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Refreshing instance network info cache due to event network-changed-258d5ba9-8361-403d-a5f2-5ee0331408bb. 
{{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1104.990549] env[62740]: DEBUG oslo_concurrency.lockutils [req-18f33154-88b8-4f45-8ab4-c244f102f6bc req-bfdad95a-0014-40d7-af34-e6987c93d711 service nova] Acquiring lock "refresh_cache-6ca702af-1a5c-40bb-b6c7-2f55ca308c02" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1104.990715] env[62740]: DEBUG oslo_concurrency.lockutils [req-18f33154-88b8-4f45-8ab4-c244f102f6bc req-bfdad95a-0014-40d7-af34-e6987c93d711 service nova] Acquired lock "refresh_cache-6ca702af-1a5c-40bb-b6c7-2f55ca308c02" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1104.990903] env[62740]: DEBUG nova.network.neutron [req-18f33154-88b8-4f45-8ab4-c244f102f6bc req-bfdad95a-0014-40d7-af34-e6987c93d711 service nova] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Refreshing network info cache for port 258d5ba9-8361-403d-a5f2-5ee0331408bb {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1104.991374] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-18f33154-88b8-4f45-8ab4-c244f102f6bc req-bfdad95a-0014-40d7-af34-e6987c93d711 service nova] Expecting reply to msg 5761c3c61c0948868e8a3af64b432d77 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1105.000931] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5761c3c61c0948868e8a3af64b432d77 [ 1105.305616] env[62740]: DEBUG nova.network.neutron [req-18f33154-88b8-4f45-8ab4-c244f102f6bc req-bfdad95a-0014-40d7-af34-e6987c93d711 service nova] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Updated VIF entry in instance network info cache for port 258d5ba9-8361-403d-a5f2-5ee0331408bb. 
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1105.305743] env[62740]: DEBUG nova.network.neutron [req-18f33154-88b8-4f45-8ab4-c244f102f6bc req-bfdad95a-0014-40d7-af34-e6987c93d711 service nova] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Updating instance_info_cache with network_info: [{"id": "258d5ba9-8361-403d-a5f2-5ee0331408bb", "address": "fa:16:3e:dc:c1:ce", "network": {"id": "70fb46bd-81f8-49d4-bb8f-cb2fb8b6944e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2105504489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "511c682375de4d5294723b0d656190d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77ccbd87-ecfd-4b2d-a1ea-29774addcef6", "external-id": "nsx-vlan-transportzone-385", "segmentation_id": 385, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap258d5ba9-83", "ovs_interfaceid": "258d5ba9-8361-403d-a5f2-5ee0331408bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1105.306287] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-18f33154-88b8-4f45-8ab4-c244f102f6bc req-bfdad95a-0014-40d7-af34-e6987c93d711 service nova] Expecting reply to msg feee0a031e40437a83094ae638224bfb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1105.321112] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg feee0a031e40437a83094ae638224bfb [ 1105.321749] env[62740]: DEBUG oslo_concurrency.lockutils [req-18f33154-88b8-4f45-8ab4-c244f102f6bc req-bfdad95a-0014-40d7-af34-e6987c93d711 service nova] Releasing lock "refresh_cache-6ca702af-1a5c-40bb-b6c7-2f55ca308c02" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1118.637011] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Expecting reply to msg aa263199e922414e9e0d95a132e33cab in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1118.647010] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa263199e922414e9e0d95a132e33cab [ 1118.647688] env[62740]: INFO nova.compute.manager [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Rebuilding instance [ 1118.687963] env[62740]: DEBUG nova.compute.manager [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Checking state {{(pid=62740) _get_power_state /opt/stack/nova/nova/compute/manager.py:1782}} [ 1118.688851] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ac814d93-c54c-4717-9df2-6b65798426f2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.696598] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Expecting reply to msg 645a650111634d0786ee49b983834209 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1118.725855] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 645a650111634d0786ee49b983834209 [ 1118.727502] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Powering off the VM {{(pid=62740) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1118.727996] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8152954a-99b1-42ab-936e-04f2f6bf0d08 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.735085] env[62740]: DEBUG oslo_vmware.api [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Waiting for the task: (returnval){ [ 1118.735085] env[62740]: value = "task-640204" [ 1118.735085] env[62740]: _type = "Task" [ 1118.735085] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.743942] env[62740]: DEBUG oslo_vmware.api [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Task: {'id': task-640204, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.246759] env[62740]: DEBUG oslo_vmware.api [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Task: {'id': task-640204, 'name': PowerOffVM_Task, 'duration_secs': 0.1492} completed successfully. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.247080] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Powered off the VM {{(pid=62740) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1119.247764] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Powering off the VM {{(pid=62740) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1119.248034] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a8d64267-b08a-4542-ac82-535b616bd8ea {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.254749] env[62740]: DEBUG oslo_vmware.api [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Waiting for the task: (returnval){ [ 1119.254749] env[62740]: value = "task-640205" [ 1119.254749] env[62740]: _type = "Task" [ 1119.254749] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.263420] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] VM already powered off {{(pid=62740) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 1119.263775] env[62740]: DEBUG nova.virt.vmwareapi.volumeops [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Volume detach. 
Driver type: vmdk {{(pid=62740) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1119.263852] env[62740]: DEBUG nova.virt.vmwareapi.volumeops [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-156106', 'volume_id': 'a8b46b50-b319-476c-a9dc-31e9571fd2b0', 'name': 'volume-a8b46b50-b319-476c-a9dc-31e9571fd2b0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'cf00af51-2b31-4b99-a692-8b0851dd74b8', 'attached_at': '', 'detached_at': '', 'volume_id': 'a8b46b50-b319-476c-a9dc-31e9571fd2b0', 'serial': 'a8b46b50-b319-476c-a9dc-31e9571fd2b0'} {{(pid=62740) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1119.264598] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c1f5139-970f-4aa3-a450-17e776cf519a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.283122] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab5a3a28-c08f-4d0b-9584-bffb07199529 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.290371] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cc006d7-c4ef-430d-98e0-16a5b084657b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.310208] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19704a2c-77a8-4118-b7e8-945b612ff04a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.325558] env[62740]: DEBUG nova.virt.vmwareapi.volumeops [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] The volume has not been displaced from its original location: [datastore1] volume-a8b46b50-b319-476c-a9dc-31e9571fd2b0/volume-a8b46b50-b319-476c-a9dc-31e9571fd2b0.vmdk. No consolidation needed. 
{{(pid=62740) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1119.330897] env[62740]: DEBUG nova.virt.vmwareapi.volumeops [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Reconfiguring VM instance instance-00000037 to detach disk 2000 {{(pid=62740) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1119.331258] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6bdace54-70de-45af-969d-914ff7ddb196 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.350886] env[62740]: DEBUG oslo_vmware.api [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Waiting for the task: (returnval){ [ 1119.350886] env[62740]: value = "task-640206" [ 1119.350886] env[62740]: _type = "Task" [ 1119.350886] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.359523] env[62740]: DEBUG oslo_vmware.api [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Task: {'id': task-640206, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.860689] env[62740]: DEBUG oslo_vmware.api [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Task: {'id': task-640206, 'name': ReconfigVM_Task, 'duration_secs': 0.15029} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.861071] env[62740]: DEBUG nova.virt.vmwareapi.volumeops [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Reconfigured VM instance instance-00000037 to detach disk 2000 {{(pid=62740) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1119.865681] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f10fae7c-997d-4c59-9ee1-4d36ab130d89 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.880830] env[62740]: DEBUG oslo_vmware.api [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Waiting for the task: (returnval){ [ 1119.880830] env[62740]: value = "task-640207" [ 1119.880830] env[62740]: _type = "Task" [ 1119.880830] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.889046] env[62740]: DEBUG oslo_vmware.api [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Task: {'id': task-640207, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.392105] env[62740]: DEBUG oslo_vmware.api [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Task: {'id': task-640207, 'name': ReconfigVM_Task, 'duration_secs': 0.155205} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.392105] env[62740]: DEBUG nova.virt.vmwareapi.volumeops [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-156106', 'volume_id': 'a8b46b50-b319-476c-a9dc-31e9571fd2b0', 'name': 'volume-a8b46b50-b319-476c-a9dc-31e9571fd2b0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'cf00af51-2b31-4b99-a692-8b0851dd74b8', 'attached_at': '', 'detached_at': '', 'volume_id': 'a8b46b50-b319-476c-a9dc-31e9571fd2b0', 'serial': 'a8b46b50-b319-476c-a9dc-31e9571fd2b0'} {{(pid=62740) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1120.392105] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1120.392659] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c099f5fe-a041-428a-9c13-372c21d20f22 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.399977] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1120.400260] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-22af0961-e228-499f-aa61-00e3da934786 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.461451] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1120.461653] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Deleting contents of the VM from datastore datastore1 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1120.461835] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 
tempest-ServerActionsV293TestJSON-1211890765-project-member] Deleting the datastore file [datastore1] cf00af51-2b31-4b99-a692-8b0851dd74b8 {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1120.462113] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-213a77d9-27c1-4a27-bcc2-38f166c72501 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.469988] env[62740]: DEBUG oslo_vmware.api [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Waiting for the task: (returnval){ [ 1120.469988] env[62740]: value = "task-640209" [ 1120.469988] env[62740]: _type = "Task" [ 1120.469988] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.481549] env[62740]: DEBUG oslo_vmware.api [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Task: {'id': task-640209, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.979056] env[62740]: DEBUG oslo_vmware.api [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Task: {'id': task-640209, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081524} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.979399] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1120.979663] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Deleted contents of the VM from datastore datastore1 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1120.979864] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1121.035651] env[62740]: DEBUG nova.virt.vmwareapi.volumeops [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Volume detach. 
Driver type: vmdk {{(pid=62740) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1121.035980] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4925c058-f197-4ab2-9cb2-910138eb4f75 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.044177] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c1fbf40-807e-410d-b241-fa1cebf45087 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.071967] env[62740]: ERROR nova.compute.manager [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Failed to detach volume a8b46b50-b319-476c-a9dc-31e9571fd2b0 from /dev/sda: nova.exception.InstanceNotFound: Instance cf00af51-2b31-4b99-a692-8b0851dd74b8 could not be found. [ 1121.071967] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Traceback (most recent call last): [ 1121.071967] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] File "/opt/stack/nova/nova/compute/manager.py", line 4133, in _do_rebuild_instance [ 1121.071967] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] self.driver.rebuild(**kwargs) [ 1121.071967] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] File "/opt/stack/nova/nova/virt/driver.py", line 390, in rebuild [ 1121.071967] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] raise NotImplementedError() [ 1121.071967] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] NotImplementedError [ 1121.071967] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] [ 1121.071967] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] During handling of the above exception, another exception occurred: [ 1121.071967] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] [ 1121.071967] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Traceback (most recent call last): [ 1121.071967] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] File "/opt/stack/nova/nova/compute/manager.py", line 3556, in _detach_root_volume [ 1121.071967] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] self.driver.detach_volume(context, old_connection_info, [ 1121.072497] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 552, in detach_volume [ 1121.072497] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] return self._volumeops.detach_volume(connection_info, instance) [ 1121.072497] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1121.072497] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] self._detach_volume_vmdk(connection_info, instance) [ 1121.072497] env[62740]: ERROR nova.compute.manager 
[instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1121.072497] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1121.072497] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1135, in get_vm_ref [ 1121.072497] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] stable_ref.fetch_moref(session) [ 1121.072497] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1126, in fetch_moref [ 1121.072497] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1121.072497] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] nova.exception.InstanceNotFound: Instance cf00af51-2b31-4b99-a692-8b0851dd74b8 could not be found. [ 1121.072497] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] [ 1121.204527] env[62740]: DEBUG nova.compute.utils [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Build of instance cf00af51-2b31-4b99-a692-8b0851dd74b8 aborted: Failed to rebuild volume backed instance. {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1121.208756] env[62740]: ERROR nova.compute.manager [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance cf00af51-2b31-4b99-a692-8b0851dd74b8 aborted: Failed to rebuild volume backed instance. 
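A note on the failure mode recorded here: the base virt driver's rebuild() raises NotImplementedError (visible at nova/virt/driver.py line 390 above), so the compute manager falls back to detaching the root volume, and the resulting InstanceNotFound is preserved and re-raised through oslo.utils' save_and_reraise_exception context manager (the _detach_root_volume frames in this traceback). A minimal sketch of that pattern, assuming a generic driver object with a detach_volume method; illustrative only, not Nova's exact code:

    from oslo_utils import excutils

    def detach_root_volume(driver, context, connection_info, instance):
        try:
            driver.detach_volume(context, connection_info, instance)
        except Exception:
            # The context manager saves the in-flight exception, lets the
            # body perform cleanup or rollback, and re-raises the saved
            # exception on exit. That is why the rebuild path above still
            # sees the original InstanceNotFound rather than a secondary
            # error from the cleanup itself.
            with excutils.save_and_reraise_exception():
                pass  # cleanup/rollback would go here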
[ 1121.208756] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Traceback (most recent call last): [ 1121.208756] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] File "/opt/stack/nova/nova/compute/manager.py", line 4133, in _do_rebuild_instance [ 1121.208756] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] self.driver.rebuild(**kwargs) [ 1121.208756] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] File "/opt/stack/nova/nova/virt/driver.py", line 390, in rebuild [ 1121.208756] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] raise NotImplementedError() [ 1121.208756] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] NotImplementedError [ 1121.208756] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] [ 1121.208756] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] During handling of the above exception, another exception occurred: [ 1121.208756] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] [ 1121.208756] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Traceback (most recent call last): [ 1121.208756] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] File "/opt/stack/nova/nova/compute/manager.py", line 3591, in _rebuild_volume_backed_instance [ 1121.209142] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] self._detach_root_volume(context, instance, root_bdm) [ 1121.209142] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] File "/opt/stack/nova/nova/compute/manager.py", line 3570, in _detach_root_volume [ 1121.209142] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] with excutils.save_and_reraise_exception(): [ 1121.209142] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1121.209142] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] self.force_reraise() [ 1121.209142] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1121.209142] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] raise self.value [ 1121.209142] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] File "/opt/stack/nova/nova/compute/manager.py", line 3556, in _detach_root_volume [ 1121.209142] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] self.driver.detach_volume(context, old_connection_info, [ 1121.209142] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 552, in detach_volume [ 1121.209142] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] return self._volumeops.detach_volume(connection_info, instance) [ 1121.209142] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] File 
"/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1121.209142] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] self._detach_volume_vmdk(connection_info, instance) [ 1121.209489] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1121.209489] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1121.209489] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1135, in get_vm_ref [ 1121.209489] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] stable_ref.fetch_moref(session) [ 1121.209489] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1126, in fetch_moref [ 1121.209489] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1121.209489] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] nova.exception.InstanceNotFound: Instance cf00af51-2b31-4b99-a692-8b0851dd74b8 could not be found. [ 1121.209489] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] [ 1121.209489] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] During handling of the above exception, another exception occurred: [ 1121.209489] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] [ 1121.209489] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Traceback (most recent call last): [ 1121.209489] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] File "/opt/stack/nova/nova/compute/manager.py", line 10854, in _error_out_instance_on_exception [ 1121.209489] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] yield [ 1121.209489] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] File "/opt/stack/nova/nova/compute/manager.py", line 3859, in rebuild_instance [ 1121.209864] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] self._do_rebuild_instance_with_claim( [ 1121.209864] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] File "/opt/stack/nova/nova/compute/manager.py", line 3945, in _do_rebuild_instance_with_claim [ 1121.209864] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] self._do_rebuild_instance( [ 1121.209864] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] File "/opt/stack/nova/nova/compute/manager.py", line 4137, in _do_rebuild_instance [ 1121.209864] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] self._rebuild_default_impl(**kwargs) [ 1121.209864] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] File "/opt/stack/nova/nova/compute/manager.py", line 3714, in _rebuild_default_impl [ 1121.209864] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] 
self._rebuild_volume_backed_instance( [ 1121.209864] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] File "/opt/stack/nova/nova/compute/manager.py", line 3606, in _rebuild_volume_backed_instance [ 1121.209864] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] raise exception.BuildAbortException( [ 1121.209864] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] nova.exception.BuildAbortException: Build of instance cf00af51-2b31-4b99-a692-8b0851dd74b8 aborted: Failed to rebuild volume backed instance. [ 1121.209864] env[62740]: ERROR nova.compute.manager [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] [ 1121.210623] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Expecting reply to msg 7fd72924ffa24d32826942f406a4f4d3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1121.244010] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7fd72924ffa24d32826942f406a4f4d3 [ 1121.245975] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Expecting reply to msg cc790b13e7274a8a80fcb9a2924a9af5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1121.264519] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cc790b13e7274a8a80fcb9a2924a9af5 [ 1121.265698] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Expecting reply to msg b9d46ed95c764d318a79eb2eff4c6393 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1121.290521] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b9d46ed95c764d318a79eb2eff4c6393 [ 1121.292866] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Expecting reply to msg 1688510a091447dbb0187af5f7b342a6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1121.327119] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1688510a091447dbb0187af5f7b342a6 [ 1121.329061] env[62740]: DEBUG oslo_concurrency.lockutils [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1121.330033] env[62740]: DEBUG oslo_concurrency.lockutils [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1121.533385] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6f8b3f8d-d931-412e-a9f1-7677d847ae8e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.541182] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d148135d-f40f-4190-b952-7bd7088d0029 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.572542] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-969321f1-742a-43bd-89cd-19ed00b4a03b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.579781] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21410daf-f856-4c8e-97d4-e3ca498630b3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.592382] env[62740]: DEBUG nova.compute.provider_tree [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1121.592862] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Expecting reply to msg 6e23b543de564365b1b1ec8a4d0b3107 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1121.601474] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6e23b543de564365b1b1ec8a4d0b3107 [ 1121.602348] env[62740]: DEBUG nova.scheduler.client.report [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1121.604600] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Expecting reply to msg d2d18fa71a604ff7a673ef5c7f377d15 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1121.618647] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d2d18fa71a604ff7a673ef5c7f377d15 [ 1121.619524] env[62740]: DEBUG oslo_concurrency.lockutils [None req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.290s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1121.619770] env[62740]: INFO nova.compute.manager [None 
req-75aa34a6-d0d3-45e6-9adf-5e159da44bef tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Successfully reverted task state from rebuilding on failure for instance. [ 1121.656451] env[62740]: WARNING oslo_vmware.rw_handles [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1121.656451] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1121.656451] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1121.656451] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1121.656451] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1121.656451] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 1121.656451] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1121.656451] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1121.656451] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1121.656451] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1121.656451] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1121.656451] env[62740]: ERROR oslo_vmware.rw_handles [ 1121.656921] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/2b4b9b4b-dfd3-4406-b05e-5371a5e833b7/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1121.659049] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1121.659309] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Copying Virtual Disk [datastore2] vmware_temp/2b4b9b4b-dfd3-4406-b05e-5371a5e833b7/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore2] vmware_temp/2b4b9b4b-dfd3-4406-b05e-5371a5e833b7/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1121.659875] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fcfba953-6f82-4c5e-bf20-8e4c101b9ba7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.667763] env[62740]: DEBUG 
oslo_vmware.api [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Waiting for the task: (returnval){ [ 1121.667763] env[62740]: value = "task-640210" [ 1121.667763] env[62740]: _type = "Task" [ 1121.667763] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.675568] env[62740]: DEBUG oslo_vmware.api [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Task: {'id': task-640210, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.179744] env[62740]: DEBUG oslo_vmware.exceptions [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Fault InvalidArgument not matched. {{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1122.180083] env[62740]: DEBUG oslo_concurrency.lockutils [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1122.180542] env[62740]: ERROR nova.compute.manager [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1122.180542] env[62740]: Faults: ['InvalidArgument'] [ 1122.180542] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Traceback (most recent call last): [ 1122.180542] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1122.180542] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] yield resources [ 1122.180542] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1122.180542] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] self.driver.spawn(context, instance, image_meta, [ 1122.180542] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1122.180542] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1122.180542] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1122.180542] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] self._fetch_image_if_missing(context, vi) [ 1122.180542] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1122.180936] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] image_cache(vi, tmp_image_ds_loc) [ 1122.180936] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1122.180936] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] vm_util.copy_virtual_disk( [ 1122.180936] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1122.180936] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] session._wait_for_task(vmdk_copy_task) [ 1122.180936] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1122.180936] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] return self.wait_for_task(task_ref) [ 1122.180936] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1122.180936] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] return evt.wait() [ 1122.180936] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1122.180936] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] result = hub.switch() [ 1122.180936] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1122.180936] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] return self.greenlet.switch() [ 1122.181366] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1122.181366] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] self.f(*self.args, **self.kw) [ 1122.181366] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1122.181366] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] raise exceptions.translate_fault(task_info.error) [ 1122.181366] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1122.181366] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Faults: ['InvalidArgument'] [ 1122.181366] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] [ 1122.181366] env[62740]: INFO nova.compute.manager [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: 
f22357ec-450c-4545-8822-74b83bfc5a35] Terminating instance [ 1122.182963] env[62740]: DEBUG nova.compute.manager [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1122.183177] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1122.183522] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1122.183657] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1122.184373] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-237bb72b-300a-4c1b-abeb-6dfad0edc006 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.186835] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3da0ad15-816f-4a36-8a82-ba1ccd0bfc46 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.192541] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1122.192758] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2c25de4d-732a-4342-bbb6-fc5774379b5d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.194805] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1122.194974] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1122.195887] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8830ac47-b341-4dd7-866e-b761d6c52b6d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.200379] env[62740]: DEBUG oslo_vmware.api [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Waiting for the task: (returnval){ [ 1122.200379] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5240e6be-3859-28c1-e961-3a4205771470" [ 1122.200379] env[62740]: _type = "Task" [ 1122.200379] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.207286] env[62740]: DEBUG oslo_vmware.api [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5240e6be-3859-28c1-e961-3a4205771470, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.259613] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1122.259853] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1122.260057] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Deleting the datastore file [datastore2] f22357ec-450c-4545-8822-74b83bfc5a35 {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1122.260343] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4383b5e8-df65-4b99-80f6-11e78f3955a9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.266586] env[62740]: DEBUG oslo_vmware.api [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Waiting for the task: (returnval){ [ 1122.266586] env[62740]: value = "task-640212" [ 1122.266586] env[62740]: _type = "Task" [ 1122.266586] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.274154] env[62740]: DEBUG oslo_vmware.api [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Task: {'id': task-640212, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.397462] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-10b0d1c2-15f3-470a-b1ab-4cdb5962d96f tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Expecting reply to msg 12737965000649888a2a4ff8a10daa38 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1122.408024] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 12737965000649888a2a4ff8a10daa38 [ 1122.408362] env[62740]: DEBUG oslo_concurrency.lockutils [None req-10b0d1c2-15f3-470a-b1ab-4cdb5962d96f tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Acquiring lock "cf00af51-2b31-4b99-a692-8b0851dd74b8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1122.408719] env[62740]: DEBUG oslo_concurrency.lockutils [None req-10b0d1c2-15f3-470a-b1ab-4cdb5962d96f tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Lock "cf00af51-2b31-4b99-a692-8b0851dd74b8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1122.408826] env[62740]: DEBUG oslo_concurrency.lockutils [None req-10b0d1c2-15f3-470a-b1ab-4cdb5962d96f tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Acquiring lock "cf00af51-2b31-4b99-a692-8b0851dd74b8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1122.409106] env[62740]: DEBUG oslo_concurrency.lockutils [None req-10b0d1c2-15f3-470a-b1ab-4cdb5962d96f tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Lock "cf00af51-2b31-4b99-a692-8b0851dd74b8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1122.409528] env[62740]: DEBUG oslo_concurrency.lockutils [None req-10b0d1c2-15f3-470a-b1ab-4cdb5962d96f tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Lock "cf00af51-2b31-4b99-a692-8b0851dd74b8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1122.411599] env[62740]: INFO nova.compute.manager [None req-10b0d1c2-15f3-470a-b1ab-4cdb5962d96f tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Terminating instance [ 1122.413652] env[62740]: DEBUG nova.compute.manager [None req-10b0d1c2-15f3-470a-b1ab-4cdb5962d96f tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Start destroying the instance on the hypervisor. 
{{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1122.414306] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aa77ee26-840f-4f25-a359-cd8c5e4142de {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.422613] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35229609-874d-402e-86c5-d891eb648446 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.449880] env[62740]: WARNING nova.virt.vmwareapi.driver [None req-10b0d1c2-15f3-470a-b1ab-4cdb5962d96f tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Instance does not exist. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance cf00af51-2b31-4b99-a692-8b0851dd74b8 could not be found. [ 1122.450096] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-10b0d1c2-15f3-470a-b1ab-4cdb5962d96f tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1122.450391] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-19f1090b-349a-49db-90a2-513e93e660b5 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.457535] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1c3f299-9643-438c-bbde-e42c7b4ead1a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.484963] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-10b0d1c2-15f3-470a-b1ab-4cdb5962d96f tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance cf00af51-2b31-4b99-a692-8b0851dd74b8 could not be found. [ 1122.485211] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-10b0d1c2-15f3-470a-b1ab-4cdb5962d96f tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1122.485508] env[62740]: INFO nova.compute.manager [None req-10b0d1c2-15f3-470a-b1ab-4cdb5962d96f tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Took 0.07 seconds to destroy the instance on the hypervisor. [ 1122.485834] env[62740]: DEBUG oslo.service.loopingcall [None req-10b0d1c2-15f3-470a-b1ab-4cdb5962d96f tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1122.486072] env[62740]: DEBUG nova.compute.manager [-] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1122.486171] env[62740]: DEBUG nova.network.neutron [-] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1122.711573] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1122.711803] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Creating directory with path [datastore2] vmware_temp/b0737379-5ab7-48ce-bc15-353dd8126301/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1122.712040] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a638901b-fdfc-4996-ad15-2c1e50cd1f3d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.722565] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Created directory with path [datastore2] vmware_temp/b0737379-5ab7-48ce-bc15-353dd8126301/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1122.722761] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Fetch image to [datastore2] vmware_temp/b0737379-5ab7-48ce-bc15-353dd8126301/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1122.722935] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/b0737379-5ab7-48ce-bc15-353dd8126301/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1122.723689] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9422b466-a013-4fdb-9547-adbfe8f5bdad {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.730706] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72a0c67b-9d89-4ccc-b712-cb676c7afe0b {{(pid=62740) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.739899] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7260a742-9b03-403d-875e-752f4db37bee {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.773433] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2e197d7-cbe3-4e92-bfc5-4c72fb58105a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.780469] env[62740]: DEBUG oslo_vmware.api [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Task: {'id': task-640212, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.06428} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.781873] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1122.782081] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1122.782260] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1122.782436] env[62740]: INFO nova.compute.manager [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Took 0.60 seconds to destroy the instance on the hypervisor. 
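The DeleteDatastoreFile_Task sequence that just completed follows the usual oslo.vmware shape: a vCenter *_Task method is invoked through the API session, and the caller then blocks in wait_for_task while _poll_task logs progress until the task succeeds or raises. A minimal sketch of that invoke-then-wait pattern, assuming oslo.vmware's public session API; the endpoint, credentials, datacenter moref, and datastore path below are illustrative placeholders, not values taken from this log:

    # Sketch: start a vCenter task and poll it to completion, the same
    # invoke/wait shape that produced the task-640212 lines above.
    # Endpoint, credentials, and morefs are placeholders.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    file_manager = session.vim.service_content.fileManager
    datacenter = vim_util.get_moref('datacenter-1', 'Datacenter')  # placeholder

    # Kick off the server-side task; this SOAP call corresponds to the
    # "Invoking FileManager.DeleteDatastoreFile_Task" entry.
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore2] f22357ec-450c-4545-8822-74b83bfc5a35',
        datacenter=datacenter)

    # Block until vCenter reports the result; each poll emits a
    # "Task: {'id': ..., 'name': DeleteDatastoreFile_Task} progress is N%" line.
    session.wait_for_task(task)
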
[ 1122.784494] env[62740]: DEBUG nova.compute.claims [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1122.784676] env[62740]: DEBUG oslo_concurrency.lockutils [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1122.784888] env[62740]: DEBUG oslo_concurrency.lockutils [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1122.786827] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Expecting reply to msg 474c8339d0c04cbbae4299b20c23157e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1122.788369] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-09868a1b-7848-45d1-a0cb-a2cc85a83ab3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.809446] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1122.856717] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 474c8339d0c04cbbae4299b20c23157e [ 1122.865345] env[62740]: DEBUG oslo_vmware.rw_handles [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b0737379-5ab7-48ce-bc15-353dd8126301/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1122.930774] env[62740]: DEBUG oslo_vmware.rw_handles [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Completed reading data from the image iterator. 
{{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1122.930965] env[62740]: DEBUG oslo_vmware.rw_handles [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b0737379-5ab7-48ce-bc15-353dd8126301/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1123.144110] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74175c0b-eeb0-4e2a-a3c7-ce01b96a4ccc {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.152277] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77df49b0-9b36-4d29-b640-a429d7a94549 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.183413] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3f8b4e9-6400-461f-b9b8-eafe8d0fbdf6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.191195] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07611388-3f2d-40fd-a75d-d797206a934c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.204905] env[62740]: DEBUG nova.compute.provider_tree [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1123.205599] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Expecting reply to msg e141f616a23a46128b1b47936f86fbe2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1123.213599] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e141f616a23a46128b1b47936f86fbe2 [ 1123.214620] env[62740]: DEBUG nova.scheduler.client.report [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1123.217128] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Expecting reply to msg 
bc1d167e03df4dbf86be45c8564e716a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1123.231696] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc1d167e03df4dbf86be45c8564e716a [ 1123.232560] env[62740]: DEBUG oslo_concurrency.lockutils [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.448s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1123.233213] env[62740]: ERROR nova.compute.manager [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1123.233213] env[62740]: Faults: ['InvalidArgument'] [ 1123.233213] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Traceback (most recent call last): [ 1123.233213] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1123.233213] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] self.driver.spawn(context, instance, image_meta, [ 1123.233213] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1123.233213] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1123.233213] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1123.233213] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] self._fetch_image_if_missing(context, vi) [ 1123.233213] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1123.233213] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] image_cache(vi, tmp_image_ds_loc) [ 1123.233213] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1123.233621] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] vm_util.copy_virtual_disk( [ 1123.233621] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1123.233621] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] session._wait_for_task(vmdk_copy_task) [ 1123.233621] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1123.233621] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] return self.wait_for_task(task_ref) [ 1123.233621] env[62740]: ERROR 
nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1123.233621] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] return evt.wait() [ 1123.233621] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1123.233621] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] result = hub.switch() [ 1123.233621] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1123.233621] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] return self.greenlet.switch() [ 1123.233621] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1123.233621] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] self.f(*self.args, **self.kw) [ 1123.234074] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1123.234074] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] raise exceptions.translate_fault(task_info.error) [ 1123.234074] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1123.234074] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Faults: ['InvalidArgument'] [ 1123.234074] env[62740]: ERROR nova.compute.manager [instance: f22357ec-450c-4545-8822-74b83bfc5a35] [ 1123.236472] env[62740]: DEBUG nova.compute.utils [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1123.236971] env[62740]: DEBUG nova.compute.manager [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Build of instance f22357ec-450c-4545-8822-74b83bfc5a35 was re-scheduled: A specified parameter was not correct: fileType [ 1123.236971] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1123.237522] env[62740]: DEBUG nova.compute.manager [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1123.237803] env[62740]: DEBUG nova.compute.manager [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Virt driver does not provide unplug_vifs method, so 
it is not possible to determine if VIFs should be unplugged. {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1123.238096] env[62740]: DEBUG nova.compute.manager [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1123.238737] env[62740]: DEBUG nova.network.neutron [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1123.263376] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg f70ad98977b34aa2bd221d0c1cda080d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1123.273629] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f70ad98977b34aa2bd221d0c1cda080d [ 1123.273813] env[62740]: DEBUG nova.network.neutron [-] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1123.274445] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c905d9c2b86e459f98bac39eb72e6db7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1123.288548] env[62740]: DEBUG nova.compute.manager [req-74b38814-aec4-4aae-8b53-dfd747e17b5c req-04bfc192-e25e-4d95-a4c1-ba5a03ad5bd0 service nova] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Received event network-vif-deleted-e5a583ec-2110-4864-aff5-95258c10a46d {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1123.289049] env[62740]: INFO nova.compute.manager [req-74b38814-aec4-4aae-8b53-dfd747e17b5c req-04bfc192-e25e-4d95-a4c1-ba5a03ad5bd0 service nova] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Neutron deleted interface e5a583ec-2110-4864-aff5-95258c10a46d; detaching it from the instance and deleting it from the info cache [ 1123.289579] env[62740]: DEBUG nova.network.neutron [req-74b38814-aec4-4aae-8b53-dfd747e17b5c req-04bfc192-e25e-4d95-a4c1-ba5a03ad5bd0 service nova] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1123.290173] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-74b38814-aec4-4aae-8b53-dfd747e17b5c req-04bfc192-e25e-4d95-a4c1-ba5a03ad5bd0 service nova] Expecting reply to msg 78e62b27152a4bdab5ea2a792bacd0a2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1123.291851] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c905d9c2b86e459f98bac39eb72e6db7 [ 1123.293033] env[62740]: INFO nova.compute.manager [-] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Took 0.81 seconds to deallocate network for instance. 
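The Acquiring/acquired/released triplets that bracket do_terminate_instance and _clear_events in the entries above come from oslo.concurrency's lockutils, which logs how long each caller waited for and then held a named lock. Nova reaches it through its own synchronized wrapper; a minimal sketch using lockutils directly, with illustrative lock names:

    # Sketch: the named-lock pattern behind the
    # "Lock ... acquired by ... :: waited 0.000s" DEBUG lines above.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('cf00af51-2b31-4b99-a692-8b0851dd74b8')
    def do_terminate_instance():
        # Only one thread in this process terminates the instance at a
        # time; entry and exit emit the acquired/released log entries.
        pass

    # The same lock machinery is available inline as a context manager:
    with lockutils.lock('compute_resources'):
        pass  # critical section, e.g. a resource-tracker claim update
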
[ 1123.301581] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 78e62b27152a4bdab5ea2a792bacd0a2 [ 1123.301719] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-444d160a-4e0e-4463-8679-c5a8207735ef {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.312355] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72bc8d49-3bd6-4154-8757-943bd11ed26c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.350122] env[62740]: DEBUG nova.compute.manager [req-74b38814-aec4-4aae-8b53-dfd747e17b5c req-04bfc192-e25e-4d95-a4c1-ba5a03ad5bd0 service nova] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Detach interface failed, port_id=e5a583ec-2110-4864-aff5-95258c10a46d, reason: Instance cf00af51-2b31-4b99-a692-8b0851dd74b8 could not be found. {{(pid=62740) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10954}} [ 1123.394611] env[62740]: INFO nova.compute.manager [None req-10b0d1c2-15f3-470a-b1ab-4cdb5962d96f tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Took 0.10 seconds to detach 1 volumes for instance. [ 1123.399151] env[62740]: DEBUG nova.compute.manager [None req-10b0d1c2-15f3-470a-b1ab-4cdb5962d96f tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Deleting volume: a8b46b50-b319-476c-a9dc-31e9571fd2b0 {{(pid=62740) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3238}} [ 1123.444278] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-10b0d1c2-15f3-470a-b1ab-4cdb5962d96f tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Expecting reply to msg b4579a139328440781736e2dd40ed97c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1123.496522] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4579a139328440781736e2dd40ed97c [ 1123.497735] env[62740]: DEBUG oslo_concurrency.lockutils [None req-10b0d1c2-15f3-470a-b1ab-4cdb5962d96f tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1123.497991] env[62740]: DEBUG oslo_concurrency.lockutils [None req-10b0d1c2-15f3-470a-b1ab-4cdb5962d96f tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1123.498230] env[62740]: DEBUG nova.objects.instance [None req-10b0d1c2-15f3-470a-b1ab-4cdb5962d96f tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Lazy-loading 'resources' on Instance uuid cf00af51-2b31-4b99-a692-8b0851dd74b8 {{(pid=62740) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1123.498599] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None 
req-10b0d1c2-15f3-470a-b1ab-4cdb5962d96f tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Expecting reply to msg 427434bca426443389ac7d217f84e3e0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1123.510104] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 427434bca426443389ac7d217f84e3e0 [ 1123.607328] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Expecting reply to msg e1c6afbc93fe4a8d8be2514979440812 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1123.622971] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e1c6afbc93fe4a8d8be2514979440812 [ 1123.623636] env[62740]: DEBUG nova.network.neutron [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1123.624088] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Expecting reply to msg 2542d84e46564856af5d8074e6ee2293 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1123.637803] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2542d84e46564856af5d8074e6ee2293 [ 1123.638516] env[62740]: INFO nova.compute.manager [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Took 0.40 seconds to deallocate network for instance. 
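The inventory dictionaries the scheduler report client logs for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 translate into schedulable capacity as (total - reserved) * allocation_ratio per resource class, the usual placement arithmetic. A short worked check against the values reported in this log:

    # Effective capacity implied by the inventory reported above for
    # provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 (values from this log).
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # -> VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
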
[ 1123.640222] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Expecting reply to msg 89098610a5c94107a2bebbdc19f10d00 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1123.685097] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 89098610a5c94107a2bebbdc19f10d00 [ 1123.687684] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Expecting reply to msg 5afec8de0a4d4f359b63f9f7a5b6a5ec in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1123.731733] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5afec8de0a4d4f359b63f9f7a5b6a5ec [ 1123.759216] env[62740]: INFO nova.scheduler.client.report [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Deleted allocations for instance f22357ec-450c-4545-8822-74b83bfc5a35 [ 1123.768751] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Expecting reply to msg 9552ab0153df45c0a0eaa9c7cb175e8e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1123.792711] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9552ab0153df45c0a0eaa9c7cb175e8e [ 1123.793342] env[62740]: DEBUG oslo_concurrency.lockutils [None req-249f2d23-d1be-4c08-89bf-9e66364628a8 tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Lock "f22357ec-450c-4545-8822-74b83bfc5a35" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 377.817s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1123.793885] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Expecting reply to msg c1699ce2db3f4e22b83083ae0a311ab5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1123.794634] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5df9e5ad-fb4b-4156-bac8-94416c7fbbad tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Lock "f22357ec-450c-4545-8822-74b83bfc5a35" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 181.449s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1123.794854] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5df9e5ad-fb4b-4156-bac8-94416c7fbbad tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Acquiring lock "f22357ec-450c-4545-8822-74b83bfc5a35-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1123.795075] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5df9e5ad-fb4b-4156-bac8-94416c7fbbad tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Lock 
"f22357ec-450c-4545-8822-74b83bfc5a35-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1123.795245] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5df9e5ad-fb4b-4156-bac8-94416c7fbbad tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Lock "f22357ec-450c-4545-8822-74b83bfc5a35-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1123.800020] env[62740]: INFO nova.compute.manager [None req-5df9e5ad-fb4b-4156-bac8-94416c7fbbad tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Terminating instance [ 1123.804793] env[62740]: DEBUG nova.compute.manager [None req-5df9e5ad-fb4b-4156-bac8-94416c7fbbad tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1123.805012] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5df9e5ad-fb4b-4156-bac8-94416c7fbbad tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1123.807210] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d2c60d45-67f9-456b-946f-d52cd26413e0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.815861] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7756e30-fd6e-443e-a159-f9b76609857c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.827312] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c1699ce2db3f4e22b83083ae0a311ab5 [ 1123.829192] env[62740]: DEBUG nova.compute.manager [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Starting instance... 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1123.831382] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Expecting reply to msg 16c58bf0845e44f3a761563345e4bccb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1123.832806] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cd26044-5845-451a-8f69-4f480e1fa9ad {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.841348] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fde456a7-f685-434a-b71d-3b171e89a6a6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.853498] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-5df9e5ad-fb4b-4156-bac8-94416c7fbbad tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f22357ec-450c-4545-8822-74b83bfc5a35 could not be found. [ 1123.853635] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5df9e5ad-fb4b-4156-bac8-94416c7fbbad tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1123.853813] env[62740]: INFO nova.compute.manager [None req-5df9e5ad-fb4b-4156-bac8-94416c7fbbad tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1123.854093] env[62740]: DEBUG oslo.service.loopingcall [None req-5df9e5ad-fb4b-4156-bac8-94416c7fbbad tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1123.855740] env[62740]: DEBUG nova.compute.manager [-] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1123.855740] env[62740]: DEBUG nova.network.neutron [-] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1123.886543] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 16c58bf0845e44f3a761563345e4bccb [ 1123.890311] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f89b80e9-8778-48ae-80a6-d25c18b2bd23 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.900787] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b21f7a03-0b3e-42a9-aa64-e69d7f289c28 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.906293] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1123.917155] env[62740]: DEBUG nova.compute.provider_tree [None req-10b0d1c2-15f3-470a-b1ab-4cdb5962d96f tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1123.917649] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-10b0d1c2-15f3-470a-b1ab-4cdb5962d96f tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Expecting reply to msg 339b41affe2e45d799bdf5b150dc7952 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1123.919193] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 8fd5e6982423478ba111f172625feb47 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1123.925796] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 339b41affe2e45d799bdf5b150dc7952 [ 1123.926217] env[62740]: DEBUG nova.scheduler.client.report [None req-10b0d1c2-15f3-470a-b1ab-4cdb5962d96f tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1123.928553] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-10b0d1c2-15f3-470a-b1ab-4cdb5962d96f 
tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Expecting reply to msg 38adfcb6563540dfbf2f7d4f29cde51c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1123.929446] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8fd5e6982423478ba111f172625feb47 [ 1123.929785] env[62740]: DEBUG nova.network.neutron [-] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1123.930121] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 7e5617a57a544d159f832763691cc566 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1123.938627] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7e5617a57a544d159f832763691cc566 [ 1123.939051] env[62740]: INFO nova.compute.manager [-] [instance: f22357ec-450c-4545-8822-74b83bfc5a35] Took 0.08 seconds to deallocate network for instance. [ 1123.942584] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5df9e5ad-fb4b-4156-bac8-94416c7fbbad tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Expecting reply to msg 8202cbc0f7c34f899114af11ac488b12 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1123.943649] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 38adfcb6563540dfbf2f7d4f29cde51c [ 1123.944352] env[62740]: DEBUG oslo_concurrency.lockutils [None req-10b0d1c2-15f3-470a-b1ab-4cdb5962d96f tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.446s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1123.946784] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.041s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1123.948559] env[62740]: INFO nova.compute.claims [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1123.949914] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Expecting reply to msg d0334b005da54a1f8b32309a4cfbea97 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1123.958377] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-10b0d1c2-15f3-470a-b1ab-4cdb5962d96f tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Expecting reply to msg d1c0572af21e4c189bd9d5f0b191cc29 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1123.983398] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8202cbc0f7c34f899114af11ac488b12 [ 1123.991577] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response 
for msg d0334b005da54a1f8b32309a4cfbea97 [ 1123.993182] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Expecting reply to msg 2d8c9873be0043f7bae93b924012c7aa in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1123.997845] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5df9e5ad-fb4b-4156-bac8-94416c7fbbad tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Expecting reply to msg 6056611fd63a453bb7ae77aaa0cffdc7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1124.003045] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2d8c9873be0043f7bae93b924012c7aa [ 1124.017794] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d1c0572af21e4c189bd9d5f0b191cc29 [ 1124.023654] env[62740]: DEBUG oslo_concurrency.lockutils [None req-10b0d1c2-15f3-470a-b1ab-4cdb5962d96f tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Lock "cf00af51-2b31-4b99-a692-8b0851dd74b8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 1.615s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1124.024060] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-10b0d1c2-15f3-470a-b1ab-4cdb5962d96f tempest-ServerActionsV293TestJSON-1211890765 tempest-ServerActionsV293TestJSON-1211890765-project-member] Expecting reply to msg 743a092e0eb54496a99d252b655c1e7e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1124.036114] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6056611fd63a453bb7ae77aaa0cffdc7 [ 1124.036580] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 743a092e0eb54496a99d252b655c1e7e [ 1124.041639] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5df9e5ad-fb4b-4156-bac8-94416c7fbbad tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Lock "f22357ec-450c-4545-8822-74b83bfc5a35" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.247s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1124.041959] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5df9e5ad-fb4b-4156-bac8-94416c7fbbad tempest-ServerRescueTestJSON-1093684156 tempest-ServerRescueTestJSON-1093684156-project-member] Expecting reply to msg bfd1c0a34b4842eb82c13caa97722862 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1124.052337] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bfd1c0a34b4842eb82c13caa97722862 [ 1124.201542] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-249e7db5-841e-4259-88a9-f659306fdac2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.209379] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e458553e-31cb-47cc-b31a-5de1566eef6c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.238420] env[62740]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a780cb50-cea4-4a62-8b90-8bbb54038b4d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.245392] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5068f491-a6cc-4953-9661-84b6b9b6ba53 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.259276] env[62740]: DEBUG nova.compute.provider_tree [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1124.259796] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Expecting reply to msg 6984f2681ed3437d8ad74995073b47d5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1124.268811] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6984f2681ed3437d8ad74995073b47d5 [ 1124.269783] env[62740]: DEBUG nova.scheduler.client.report [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1124.272104] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Expecting reply to msg 35d3cfa3a4ff4d76becc7f1a9e14eec3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1124.286522] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 35d3cfa3a4ff4d76becc7f1a9e14eec3 [ 1124.287761] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.340s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1124.287887] env[62740]: DEBUG nova.compute.manager [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Start building networks asynchronously for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1124.289715] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Expecting reply to msg c67ccdf2bc45443a897b2fe64acf67e3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1124.328132] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c67ccdf2bc45443a897b2fe64acf67e3 [ 1124.330067] env[62740]: DEBUG nova.compute.utils [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1124.330683] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Expecting reply to msg cec5f3b3d7bc4829bdfedcc2d792b4a4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1124.331806] env[62740]: DEBUG nova.compute.manager [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1124.332091] env[62740]: DEBUG nova.network.neutron [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1124.347075] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cec5f3b3d7bc4829bdfedcc2d792b4a4 [ 1124.347882] env[62740]: DEBUG nova.compute.manager [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1124.349614] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Expecting reply to msg 62ff18609c2f47d889120d843f2b33d5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1124.402805] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 62ff18609c2f47d889120d843f2b33d5 [ 1124.405795] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Expecting reply to msg b36ff0d66bcf4bd6abd7900f29823560 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1124.440027] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b36ff0d66bcf4bd6abd7900f29823560 [ 1124.441876] env[62740]: DEBUG nova.compute.manager [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Start spawning the instance on the hypervisor. {{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1124.460271] env[62740]: DEBUG nova.policy [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '54de6490541043d38564d761dfb0b7af', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0394a4f1f9fd4b84b68187bc9fe8b41f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 1124.473101] env[62740]: DEBUG nova.virt.hardware [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=<?>,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-09-04T08:25:18Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1124.473101] env[62740]: DEBUG nova.virt.hardware [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1124.473101] env[62740]: DEBUG nova.virt.hardware [None
req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1124.473275] env[62740]: DEBUG nova.virt.hardware [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1124.473313] env[62740]: DEBUG nova.virt.hardware [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1124.473471] env[62740]: DEBUG nova.virt.hardware [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1124.473662] env[62740]: DEBUG nova.virt.hardware [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1124.473817] env[62740]: DEBUG nova.virt.hardware [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1124.473979] env[62740]: DEBUG nova.virt.hardware [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1124.474153] env[62740]: DEBUG nova.virt.hardware [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1124.474322] env[62740]: DEBUG nova.virt.hardware [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1124.475201] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26c434d5-b226-4948-b08a-87b6b16dad55 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.483136] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-463cb46c-1f97-4323-bd61-5285cd9111c8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.260804] env[62740]: DEBUG 
nova.network.neutron [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Successfully created port: 6955bb26-1b2e-4da3-a7fa-6a2e36682e1d {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1125.891562] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1125.891769] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Cleaning up deleted instances with incomplete migration {{(pid=62740) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11255}} [ 1125.892081] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 17f40c56eec64dcc9ae60a66574d1e46 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1125.901319] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 17f40c56eec64dcc9ae60a66574d1e46 [ 1126.835642] env[62740]: DEBUG nova.compute.manager [req-784c8b18-84d7-49ec-b2b1-e300e20121fc req-84987f3f-e0fd-47a9-aed7-174f080d3aa6 service nova] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Received event network-vif-plugged-6955bb26-1b2e-4da3-a7fa-6a2e36682e1d {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1126.835962] env[62740]: DEBUG oslo_concurrency.lockutils [req-784c8b18-84d7-49ec-b2b1-e300e20121fc req-84987f3f-e0fd-47a9-aed7-174f080d3aa6 service nova] Acquiring lock "e21a5624-20ca-45d8-a0bf-dd87cec1c701-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1126.836096] env[62740]: DEBUG oslo_concurrency.lockutils [req-784c8b18-84d7-49ec-b2b1-e300e20121fc req-84987f3f-e0fd-47a9-aed7-174f080d3aa6 service nova] Lock "e21a5624-20ca-45d8-a0bf-dd87cec1c701-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1126.836271] env[62740]: DEBUG oslo_concurrency.lockutils [req-784c8b18-84d7-49ec-b2b1-e300e20121fc req-84987f3f-e0fd-47a9-aed7-174f080d3aa6 service nova] Lock "e21a5624-20ca-45d8-a0bf-dd87cec1c701-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1126.836445] env[62740]: DEBUG nova.compute.manager [req-784c8b18-84d7-49ec-b2b1-e300e20121fc req-84987f3f-e0fd-47a9-aed7-174f080d3aa6 service nova] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] No waiting events found dispatching network-vif-plugged-6955bb26-1b2e-4da3-a7fa-6a2e36682e1d {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1126.836608] env[62740]: WARNING nova.compute.manager [req-784c8b18-84d7-49ec-b2b1-e300e20121fc req-84987f3f-e0fd-47a9-aed7-174f080d3aa6 service nova] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Received unexpected event network-vif-plugged-6955bb26-1b2e-4da3-a7fa-6a2e36682e1d for instance with vm_state
building and task_state spawning. [ 1127.060753] env[62740]: DEBUG nova.network.neutron [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Successfully updated port: 6955bb26-1b2e-4da3-a7fa-6a2e36682e1d {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1127.061633] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Expecting reply to msg 8075360fa7524fff9a7d04a56f417116 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1127.077881] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8075360fa7524fff9a7d04a56f417116 [ 1127.077881] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Acquiring lock "refresh_cache-e21a5624-20ca-45d8-a0bf-dd87cec1c701" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1127.077881] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Acquired lock "refresh_cache-e21a5624-20ca-45d8-a0bf-dd87cec1c701" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1127.077881] env[62740]: DEBUG nova.network.neutron [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1127.077881] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Expecting reply to msg c51acf6d85344cada8e0832b164156a3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1127.084585] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c51acf6d85344cada8e0832b164156a3 [ 1127.383043] env[62740]: DEBUG nova.network.neutron [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1127.816187] env[62740]: DEBUG nova.network.neutron [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Updating instance_info_cache with network_info: [{"id": "6955bb26-1b2e-4da3-a7fa-6a2e36682e1d", "address": "fa:16:3e:4a:c0:30", "network": {"id": "cf3ebaec-073e-4c37-a1fa-57b22598cfb5", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1152817577-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0394a4f1f9fd4b84b68187bc9fe8b41f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "842f738f-eaa4-4444-a9bf-90d2b533184c", "external-id": "nsx-vlan-transportzone-460", "segmentation_id": 460, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6955bb26-1b", "ovs_interfaceid": "6955bb26-1b2e-4da3-a7fa-6a2e36682e1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1127.816754] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Expecting reply to msg d5e2a34c8643433a856ebca825ccf282 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1127.831232] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d5e2a34c8643433a856ebca825ccf282 [ 1127.831813] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Releasing lock "refresh_cache-e21a5624-20ca-45d8-a0bf-dd87cec1c701" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1127.832118] env[62740]: DEBUG nova.compute.manager [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Instance network_info: |[{"id": "6955bb26-1b2e-4da3-a7fa-6a2e36682e1d", "address": "fa:16:3e:4a:c0:30", "network": {"id": "cf3ebaec-073e-4c37-a1fa-57b22598cfb5", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1152817577-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0394a4f1f9fd4b84b68187bc9fe8b41f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"842f738f-eaa4-4444-a9bf-90d2b533184c", "external-id": "nsx-vlan-transportzone-460", "segmentation_id": 460, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6955bb26-1b", "ovs_interfaceid": "6955bb26-1b2e-4da3-a7fa-6a2e36682e1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1127.832641] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4a:c0:30', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '842f738f-eaa4-4444-a9bf-90d2b533184c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6955bb26-1b2e-4da3-a7fa-6a2e36682e1d', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1127.841885] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Creating folder: Project (0394a4f1f9fd4b84b68187bc9fe8b41f). Parent ref: group-v156037. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1127.842912] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b9ac98c9-80da-4f46-a429-8bc151c3851f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.854219] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Created folder: Project (0394a4f1f9fd4b84b68187bc9fe8b41f) in parent group-v156037. [ 1127.854419] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Creating folder: Instances. Parent ref: group-v156131. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1127.854657] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-92026169-3c6c-4c8e-9902-b22eb3fc02bf {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.866205] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Created folder: Instances in parent group-v156131. [ 1127.866205] env[62740]: DEBUG oslo.service.loopingcall [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1127.866205] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1127.866456] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2ac49004-3f32-4b25-9d10-644138da1ff3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.886728] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1127.886728] env[62740]: value = "task-640216" [ 1127.886728] env[62740]: _type = "Task" [ 1127.886728] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.894894] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640216, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.900802] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1128.400741] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640216, 'name': CreateVM_Task, 'duration_secs': 0.304534} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.400919] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1128.402048] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1128.402207] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1128.402953] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1128.402953] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f26b714-c912-4f41-b793-e99ac0117ba4 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.408290] env[62740]: DEBUG oslo_vmware.api [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 
tempest-ServerActionsTestJSON-530632132-project-member] Waiting for the task: (returnval){ [ 1128.408290] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]525f97ef-2e17-face-97cb-835cdeeaa8a9" [ 1128.408290] env[62740]: _type = "Task" [ 1128.408290] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.416174] env[62740]: DEBUG oslo_vmware.api [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]525f97ef-2e17-face-97cb-835cdeeaa8a9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.885674] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1128.890431] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1128.891078] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1128.891269] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager.update_available_resource {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1128.891629] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 7f73d1b3709a41779cb282063a85a234 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1128.901423] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7f73d1b3709a41779cb282063a85a234 [ 1128.902463] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1128.902679] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1128.902846] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1128.902998] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Auditing 
locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62740) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1128.904238] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-717f5f96-25ef-4275-96fe-33cde4f3cea4 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.916901] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e0872e4-14f6-4a88-97a0-c3e0817114d7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.925820] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1128.926147] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1128.926495] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1128.936120] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa2b1e85-3416-4bf9-95c5-837c5557a638 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.943833] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5d9ce99-0b7d-4efe-8720-b03d15456301 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.974851] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181606MB free_disk=90GB free_vcpus=48 pci_devices=None {{(pid=62740) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1128.975172] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1128.975434] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62740) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1128.976407] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 03e712522a424b5e92ed1eae62d2e8b7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1128.981156] env[62740]: DEBUG nova.compute.manager [req-5e17e270-de90-4f31-a519-2b1e31b75031 req-c5f3eed4-1021-42e6-b5b4-557def243f67 service nova] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Received event network-changed-6955bb26-1b2e-4da3-a7fa-6a2e36682e1d {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1128.981156] env[62740]: DEBUG nova.compute.manager [req-5e17e270-de90-4f31-a519-2b1e31b75031 req-c5f3eed4-1021-42e6-b5b4-557def243f67 service nova] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Refreshing instance network info cache due to event network-changed-6955bb26-1b2e-4da3-a7fa-6a2e36682e1d. {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1128.981297] env[62740]: DEBUG oslo_concurrency.lockutils [req-5e17e270-de90-4f31-a519-2b1e31b75031 req-c5f3eed4-1021-42e6-b5b4-557def243f67 service nova] Acquiring lock "refresh_cache-e21a5624-20ca-45d8-a0bf-dd87cec1c701" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1128.981529] env[62740]: DEBUG oslo_concurrency.lockutils [req-5e17e270-de90-4f31-a519-2b1e31b75031 req-c5f3eed4-1021-42e6-b5b4-557def243f67 service nova] Acquired lock "refresh_cache-e21a5624-20ca-45d8-a0bf-dd87cec1c701" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1128.981689] env[62740]: DEBUG nova.network.neutron [req-5e17e270-de90-4f31-a519-2b1e31b75031 req-c5f3eed4-1021-42e6-b5b4-557def243f67 service nova] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Refreshing network info cache for port 6955bb26-1b2e-4da3-a7fa-6a2e36682e1d {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1128.982220] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-5e17e270-de90-4f31-a519-2b1e31b75031 req-c5f3eed4-1021-42e6-b5b4-557def243f67 service nova] Expecting reply to msg 6464d35081334ea49a4452e3b2b51b7f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1128.994770] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6464d35081334ea49a4452e3b2b51b7f [ 1129.017157] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 03e712522a424b5e92ed1eae62d2e8b7 [ 1129.022174] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg ac8cf4f5f37f4ea0a64fe3f435e45f99 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1129.032255] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac8cf4f5f37f4ea0a64fe3f435e45f99 [ 1129.172442] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 697e3884-2ef4-423e-af81-e5d1e94f65a2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1129.173080] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 732da1c8-e83e-4dd7-96c2-dbfa9468baab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1129.173080] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 388a39df-9fa9-4153-9f3c-4ad94fd5edfb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1129.173080] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 5f57389d-853e-4439-872a-8345664578d0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1129.173080] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 472cd209-4192-4473-b788-d1ea342653bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1129.173251] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance d8dac9af-0897-4fbf-8ee6-1fb3955d48c0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1129.173251] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance d6c3ca16-5c7c-41e6-9850-10221603ad2a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1129.173312] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 158406db-7196-4826-aefa-20a58daa186b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1129.173390] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 6ca702af-1a5c-40bb-b6c7-2f55ca308c02 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1129.173500] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance e21a5624-20ca-45d8-a0bf-dd87cec1c701 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1129.174088] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 1be549b3e00d45b4b23b2259813d8ca9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1129.189918] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1be549b3e00d45b4b23b2259813d8ca9 [ 1129.191042] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance b0b16f66-8dbc-4e9b-a932-5de45215cfff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1129.191383] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg d26b8791de0f491db03d373c3b7e791c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1129.203056] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d26b8791de0f491db03d373c3b7e791c [ 1129.203863] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 56106517-e735-4bf5-8d5a-dc0d4aab3991 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1129.204727] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 713bf2e7afed4ba389fde32a6f569549 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1129.215381] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 713bf2e7afed4ba389fde32a6f569549 [ 1129.217354] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 43e4ddf4-230e-49f7-975f-ba99a6da9398 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1129.218120] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 05ceb60561f6438f8bcabd99a83b082b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1129.231995] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 05ceb60561f6438f8bcabd99a83b082b [ 1129.232777] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance eba85edb-4d86-42c9-8b49-98f2173a3eeb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1129.233302] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg cba6d1693cf241c788ce1398bd40b466 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1129.248979] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cba6d1693cf241c788ce1398bd40b466 [ 1129.250020] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 7aacf4e0-b508-4a18-909a-3d1fe9458d98 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1129.250530] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg ebadd6856da04050a30b4599e9398531 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1129.264854] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ebadd6856da04050a30b4599e9398531 [ 1129.265705] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 6005c9dc-3067-4719-a8f9-befb63f7cd8d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1129.265921] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1129.266088] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1129.276556] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Acquiring lock "6005c9dc-3067-4719-a8f9-befb63f7cd8d" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1129.276556] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Lock "6005c9dc-3067-4719-a8f9-befb63f7cd8d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1129.424985] env[62740]: DEBUG nova.network.neutron [req-5e17e270-de90-4f31-a519-2b1e31b75031 req-c5f3eed4-1021-42e6-b5b4-557def243f67 service nova] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Updated VIF entry in instance network info cache for port 6955bb26-1b2e-4da3-a7fa-6a2e36682e1d.
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1129.424985] env[62740]: DEBUG nova.network.neutron [req-5e17e270-de90-4f31-a519-2b1e31b75031 req-c5f3eed4-1021-42e6-b5b4-557def243f67 service nova] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Updating instance_info_cache with network_info: [{"id": "6955bb26-1b2e-4da3-a7fa-6a2e36682e1d", "address": "fa:16:3e:4a:c0:30", "network": {"id": "cf3ebaec-073e-4c37-a1fa-57b22598cfb5", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1152817577-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0394a4f1f9fd4b84b68187bc9fe8b41f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "842f738f-eaa4-4444-a9bf-90d2b533184c", "external-id": "nsx-vlan-transportzone-460", "segmentation_id": 460, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6955bb26-1b", "ovs_interfaceid": "6955bb26-1b2e-4da3-a7fa-6a2e36682e1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1129.425174] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-5e17e270-de90-4f31-a519-2b1e31b75031 req-c5f3eed4-1021-42e6-b5b4-557def243f67 service nova] Expecting reply to msg c682bc3f4c144d3fb433f1d7fcea4624 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1129.435640] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c682bc3f4c144d3fb433f1d7fcea4624 [ 1129.436268] env[62740]: DEBUG oslo_concurrency.lockutils [req-5e17e270-de90-4f31-a519-2b1e31b75031 req-c5f3eed4-1021-42e6-b5b4-557def243f67 service nova] Releasing lock "refresh_cache-e21a5624-20ca-45d8-a0bf-dd87cec1c701" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1129.523506] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3a23d6d-3098-4d8a-99df-8dc528e80abf {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.532235] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9a6e66c-885b-43ef-bf26-4cbbe5d3f199 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.569474] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8ee6936-c193-4734-81c0-05d98cd536e8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.577970] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf92254f-bd3b-4843-a188-fb3768cb620a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.591055] env[62740]: DEBUG nova.compute.provider_tree [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] 
Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1129.591538] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 7f7c0dd3e97d475e803dd4d04bd97a45 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1129.601790] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7f7c0dd3e97d475e803dd4d04bd97a45 [ 1129.603054] env[62740]: DEBUG nova.scheduler.client.report [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1129.605501] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 178af20fdf344cf7abf9d3d26de8ac60 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1129.625734] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 178af20fdf344cf7abf9d3d26de8ac60 [ 1129.626529] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62740) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1129.626712] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.651s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1129.626936] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1129.627151] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Cleaning up deleted instances {{(pid=62740) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11217}} [ 1129.627637] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 4f5d92a04eec478eb6715381210b2a50 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1129.643071] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4f5d92a04eec478eb6715381210b2a50 [ 1129.644288] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] There are 1 instances to clean {{(pid=62740) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11226}} [ 1129.644399] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: cf00af51-2b31-4b99-a692-8b0851dd74b8] Instance has had 0 of 5 cleanup attempts {{(pid=62740) 
_run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11230}} [ 1129.645683] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 5aecfe8278664c9ab168500b4fe553f5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1129.707879] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5aecfe8278664c9ab168500b4fe553f5 [ 1129.772327] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Acquiring lock "ba23ede2-be42-48ac-b281-571ccd158dee" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1129.772842] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Lock "ba23ede2-be42-48ac-b281-571ccd158dee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1130.180338] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ef076b1ae96d4b50bb21daf8cd0025df in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1130.189838] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ef076b1ae96d4b50bb21daf8cd0025df [ 1130.707374] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1130.892039] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1130.892039] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Starting heal instance info cache {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 1130.892039] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Rebuilding the list of instances to heal {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 1130.892039] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 0583fc72ffcd4c6fbb9e74551f9ce4a4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1130.914021] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0583fc72ffcd4c6fbb9e74551f9ce4a4 [ 1130.916554] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Skipping network cache update for instance because it is Building.
{{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1130.916711] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1130.916886] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1130.917035] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 5f57389d-853e-4439-872a-8345664578d0] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1130.917165] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1130.917291] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1130.917415] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1130.917541] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 158406db-7196-4826-aefa-20a58daa186b] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1130.917660] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1130.917778] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1130.917895] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Didn't find any instances for network info cache update. 
{{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 1131.890192] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1131.890842] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 01fd1b5e19004e1280873678c7a6f5ca in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1131.912807] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 01fd1b5e19004e1280873678c7a6f5ca [ 1131.916901] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1131.920109] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62740) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 1133.841804] env[62740]: DEBUG oslo_concurrency.lockutils [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Acquiring lock "1ff3f5e9-284a-49b2-ad55-2c42f9b051c7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.842106] env[62740]: DEBUG oslo_concurrency.lockutils [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Lock "1ff3f5e9-284a-49b2-ad55-2c42f9b051c7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1133.890634] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1133.890905] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1133.891234] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg ab2d6924e0a34a1db1808ee6320d61f1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1133.902922] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ab2d6924e0a34a1db1808ee6320d61f1 [ 1134.355960] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ef577d0e-57db-474d-a2b7-cf83e187a51b tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 20609e48d75d47989aabae71d2fd4480 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1134.374273] 
env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 20609e48d75d47989aabae71d2fd4480 [ 1134.374503] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ef577d0e-57db-474d-a2b7-cf83e187a51b tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Acquiring lock "6ca702af-1a5c-40bb-b6c7-2f55ca308c02" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.662935] env[62740]: WARNING oslo_vmware.rw_handles [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1136.662935] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1136.662935] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1136.662935] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1136.662935] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1136.662935] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 1136.662935] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1136.662935] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1136.662935] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1136.662935] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1136.662935] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1136.662935] env[62740]: ERROR oslo_vmware.rw_handles [ 1136.663601] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/841d92f6-8f78-4340-9547-4802566328ae/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore1 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1136.665215] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1136.665469] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Copying Virtual Disk [datastore1] vmware_temp/841d92f6-8f78-4340-9547-4802566328ae/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore1] vmware_temp/841d92f6-8f78-4340-9547-4802566328ae/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1136.665757] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d13a07b0-8ee5-44f2-b1de-a5d7e815f06f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.674033] env[62740]: DEBUG oslo_vmware.api [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Waiting for the task: (returnval){ [ 1136.674033] env[62740]: value = "task-640217" [ 1136.674033] env[62740]: _type = "Task" [ 1136.674033] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.681678] env[62740]: DEBUG oslo_vmware.api [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Task: {'id': task-640217, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.184030] env[62740]: DEBUG oslo_vmware.exceptions [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Fault InvalidArgument not matched. {{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1137.184404] env[62740]: DEBUG oslo_concurrency.lockutils [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Releasing lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1137.185109] env[62740]: ERROR nova.compute.manager [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1137.185109] env[62740]: Faults: ['InvalidArgument'] [ 1137.185109] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Traceback (most recent call last): [ 1137.185109] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1137.185109] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] yield resources [ 1137.185109] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1137.185109] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] self.driver.spawn(context, instance, image_meta, [ 1137.185109] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1137.185109] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] self._vmops.spawn(context, instance, image_meta, 
injected_files, [ 1137.185109] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1137.185109] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] self._fetch_image_if_missing(context, vi) [ 1137.185109] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1137.185484] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] image_cache(vi, tmp_image_ds_loc) [ 1137.185484] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1137.185484] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] vm_util.copy_virtual_disk( [ 1137.185484] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1137.185484] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] session._wait_for_task(vmdk_copy_task) [ 1137.185484] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1137.185484] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] return self.wait_for_task(task_ref) [ 1137.185484] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1137.185484] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] return evt.wait() [ 1137.185484] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1137.185484] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] result = hub.switch() [ 1137.185484] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1137.185484] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] return self.greenlet.switch() [ 1137.185834] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1137.185834] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] self.f(*self.args, **self.kw) [ 1137.185834] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1137.185834] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] raise exceptions.translate_fault(task_info.error) [ 1137.185834] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1137.185834] env[62740]: 
ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Faults: ['InvalidArgument'] [ 1137.185834] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] [ 1137.185834] env[62740]: INFO nova.compute.manager [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Terminating instance [ 1137.187073] env[62740]: DEBUG oslo_concurrency.lockutils [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Acquired lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1137.187284] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1137.187888] env[62740]: DEBUG nova.compute.manager [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1137.188090] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1137.188318] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2729c811-5d06-49d9-a1dd-23fe81ea7c17 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.190870] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16420fdc-3e6d-4ab0-89a0-a454aa92cc80 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.197321] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1137.198267] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c91f54d9-acde-490f-8ad3-f13abf9f0fb7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.199567] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62740) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1137.199740] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1137.200683] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb76e806-c739-4cd1-9069-2632d83b6c7c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.205173] env[62740]: DEBUG oslo_vmware.api [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Waiting for the task: (returnval){ [ 1137.205173] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52701bf4-1844-97e7-2145-a98e108802d5" [ 1137.205173] env[62740]: _type = "Task" [ 1137.205173] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.211859] env[62740]: DEBUG oslo_vmware.api [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52701bf4-1844-97e7-2145-a98e108802d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.716057] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1137.716352] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Creating directory with path [datastore1] vmware_temp/bff31539-5c5f-4bb3-a925-ecfcc8080ac5/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1137.716551] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-14281190-f6bf-42bf-acff-1208aec90409 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.735648] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Created directory with path [datastore1] vmware_temp/bff31539-5c5f-4bb3-a925-ecfcc8080ac5/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1137.735852] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Fetch image to [datastore1] 
vmware_temp/bff31539-5c5f-4bb3-a925-ecfcc8080ac5/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1137.736037] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore1] vmware_temp/bff31539-5c5f-4bb3-a925-ecfcc8080ac5/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore1 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1137.736770] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24822a27-7c2b-4d6b-b720-3b110fc60c7e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.743410] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a5cec2a-4118-42ff-ad9c-db4e80a498f8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.752286] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c373d0f-621d-4278-8e4e-a8e679455674 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.783541] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70fede41-1d52-4298-9ef6-8fc465790062 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.789159] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d1eb2145-4e6a-4f75-a5ae-45404f210817 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.809179] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore1 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1137.861795] env[62740]: DEBUG oslo_vmware.rw_handles [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/bff31539-5c5f-4bb3-a925-ecfcc8080ac5/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1137.924453] env[62740]: DEBUG oslo_vmware.rw_handles [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Completed reading data from the image iterator. 
{{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1137.924669] env[62740]: DEBUG oslo_vmware.rw_handles [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/bff31539-5c5f-4bb3-a925-ecfcc8080ac5/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1139.532839] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1139.533183] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Deleting contents of the VM from datastore datastore1 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1139.533393] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Deleting the datastore file [datastore1] 697e3884-2ef4-423e-af81-e5d1e94f65a2 {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1139.533970] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-97658652-42b8-44fc-a838-df560f8676fe {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.541227] env[62740]: DEBUG oslo_vmware.api [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Waiting for the task: (returnval){ [ 1139.541227] env[62740]: value = "task-640219" [ 1139.541227] env[62740]: _type = "Task" [ 1139.541227] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.549516] env[62740]: DEBUG oslo_vmware.api [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Task: {'id': task-640219, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.051761] env[62740]: DEBUG oslo_vmware.api [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Task: {'id': task-640219, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071978} completed successfully. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.052015] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1140.052212] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Deleted contents of the VM from datastore datastore1 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1140.052392] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1140.052568] env[62740]: INFO nova.compute.manager [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Took 2.86 seconds to destroy the instance on the hypervisor. [ 1140.054739] env[62740]: DEBUG nova.compute.claims [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1140.054920] env[62740]: DEBUG oslo_concurrency.lockutils [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1140.055163] env[62740]: DEBUG oslo_concurrency.lockutils [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1140.057113] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Expecting reply to msg 1cfac895074548b9b9ce17dc99c3f8cd in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1140.094778] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1cfac895074548b9b9ce17dc99c3f8cd [ 1140.113397] env[62740]: DEBUG nova.scheduler.client.report [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Refreshing inventories for resource provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1140.128999] env[62740]: DEBUG nova.scheduler.client.report [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Updating ProviderTree inventory for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1140.129257] env[62740]: DEBUG nova.compute.provider_tree [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Updating inventory in ProviderTree for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1140.139462] env[62740]: DEBUG nova.scheduler.client.report [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Refreshing aggregate associations for resource provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0, aggregates: None {{(pid=62740) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1140.156109] env[62740]: DEBUG nova.scheduler.client.report [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Refreshing trait associations for resource provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62740) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1140.352349] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f306b271-276d-4dd3-9e5b-28967ccab5f6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.359517] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b070034-5897-4c50-810c-d8c04062773c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.387908] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1505b62d-a37e-4afa-9d9b-9900aa05ba29 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.394358] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8234e4a1-930e-44f4-a688-a127a200dab9 {{(pid=62740) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.406630] env[62740]: DEBUG nova.compute.provider_tree [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1140.407133] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Expecting reply to msg 95a29e5f67964aa79355e9321b3a84c0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1140.413952] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 95a29e5f67964aa79355e9321b3a84c0 [ 1140.414829] env[62740]: DEBUG nova.scheduler.client.report [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1140.417123] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Expecting reply to msg dcdb47dd86604618a9900f5215fc6200 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1140.430501] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dcdb47dd86604618a9900f5215fc6200 [ 1140.431209] env[62740]: DEBUG oslo_concurrency.lockutils [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.376s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1140.431722] env[62740]: ERROR nova.compute.manager [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1140.431722] env[62740]: Faults: ['InvalidArgument'] [ 1140.431722] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Traceback (most recent call last): [ 1140.431722] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1140.431722] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] self.driver.spawn(context, instance, image_meta, [ 
1140.431722] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1140.431722] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1140.431722] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1140.431722] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] self._fetch_image_if_missing(context, vi) [ 1140.431722] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1140.431722] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] image_cache(vi, tmp_image_ds_loc) [ 1140.431722] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1140.432132] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] vm_util.copy_virtual_disk( [ 1140.432132] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1140.432132] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] session._wait_for_task(vmdk_copy_task) [ 1140.432132] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1140.432132] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] return self.wait_for_task(task_ref) [ 1140.432132] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1140.432132] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] return evt.wait() [ 1140.432132] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1140.432132] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] result = hub.switch() [ 1140.432132] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1140.432132] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] return self.greenlet.switch() [ 1140.432132] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1140.432132] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] self.f(*self.args, **self.kw) [ 1140.432555] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1140.432555] env[62740]: ERROR 
nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] raise exceptions.translate_fault(task_info.error) [ 1140.432555] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1140.432555] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Faults: ['InvalidArgument'] [ 1140.432555] env[62740]: ERROR nova.compute.manager [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] [ 1140.432555] env[62740]: DEBUG nova.compute.utils [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1140.433807] env[62740]: DEBUG nova.compute.manager [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Build of instance 697e3884-2ef4-423e-af81-e5d1e94f65a2 was re-scheduled: A specified parameter was not correct: fileType [ 1140.433807] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1140.434203] env[62740]: DEBUG nova.compute.manager [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1140.434410] env[62740]: DEBUG nova.compute.manager [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1140.434547] env[62740]: DEBUG nova.compute.manager [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1140.434710] env[62740]: DEBUG nova.network.neutron [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1140.821396] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Expecting reply to msg 8a473503602d463ea7573262f7f9aa71 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1140.837082] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8a473503602d463ea7573262f7f9aa71 [ 1140.837789] env[62740]: DEBUG nova.network.neutron [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1140.838128] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Expecting reply to msg c2edb0dc0c114a80bba7ad804e1cdc2f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1140.848922] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c2edb0dc0c114a80bba7ad804e1cdc2f [ 1140.849121] env[62740]: INFO nova.compute.manager [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Took 0.41 seconds to deallocate network for instance. 
[ 1140.850866] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Expecting reply to msg 9c6278615fdf4efbbc3aee52537839cd in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1140.891025] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9c6278615fdf4efbbc3aee52537839cd [ 1140.892635] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Expecting reply to msg cac14d10d35c4634882474e4419ce44b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1140.922191] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cac14d10d35c4634882474e4419ce44b [ 1140.947086] env[62740]: INFO nova.scheduler.client.report [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Deleted allocations for instance 697e3884-2ef4-423e-af81-e5d1e94f65a2 [ 1140.953135] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Expecting reply to msg 3143aee03e754c3b9d8394f2d2f51261 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1140.967651] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3143aee03e754c3b9d8394f2d2f51261 [ 1140.968202] env[62740]: DEBUG oslo_concurrency.lockutils [None req-97359ccb-6346-4d30-b4db-437b89d3ec90 tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Lock "697e3884-2ef4-423e-af81-e5d1e94f65a2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 433.934s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1140.968799] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg e2999d68f6a54f579b92d861e19daee9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1140.969642] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5402c50a-9534-4b4b-94d3-7968411f783d tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Lock "697e3884-2ef4-423e-af81-e5d1e94f65a2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 238.374s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1140.969871] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5402c50a-9534-4b4b-94d3-7968411f783d tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Acquiring lock "697e3884-2ef4-423e-af81-e5d1e94f65a2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1140.970452] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5402c50a-9534-4b4b-94d3-7968411f783d 
tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Lock "697e3884-2ef4-423e-af81-e5d1e94f65a2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1140.970452] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5402c50a-9534-4b4b-94d3-7968411f783d tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Lock "697e3884-2ef4-423e-af81-e5d1e94f65a2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1140.972730] env[62740]: INFO nova.compute.manager [None req-5402c50a-9534-4b4b-94d3-7968411f783d tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Terminating instance [ 1140.976871] env[62740]: DEBUG nova.compute.manager [None req-5402c50a-9534-4b4b-94d3-7968411f783d tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1140.977132] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5402c50a-9534-4b4b-94d3-7968411f783d tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1140.977531] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-139f1d57-9455-4f16-b4f7-6e946e824203 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.980687] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2999d68f6a54f579b92d861e19daee9 [ 1140.980941] env[62740]: DEBUG nova.compute.manager [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Starting instance... 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1140.982881] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 043684f4827d4b9a893acf6582d7106e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1140.990535] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba8d23f4-bf92-4d6b-a567-9ec51aa94ab7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.020132] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-5402c50a-9534-4b4b-94d3-7968411f783d tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 697e3884-2ef4-423e-af81-e5d1e94f65a2 could not be found. [ 1141.020418] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5402c50a-9534-4b4b-94d3-7968411f783d tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1141.020609] env[62740]: INFO nova.compute.manager [None req-5402c50a-9534-4b4b-94d3-7968411f783d tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1141.020885] env[62740]: DEBUG oslo.service.loopingcall [None req-5402c50a-9534-4b4b-94d3-7968411f783d tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1141.021139] env[62740]: DEBUG nova.compute.manager [-] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1141.021236] env[62740]: DEBUG nova.network.neutron [-] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1141.024563] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 043684f4827d4b9a893acf6582d7106e [ 1141.041180] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 9ca5c93701bb4124b9cf6eede265f06f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1141.044249] env[62740]: DEBUG oslo_concurrency.lockutils [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1141.044486] env[62740]: DEBUG oslo_concurrency.lockutils [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1141.046061] env[62740]: INFO nova.compute.claims [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1141.047784] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 48954e35c97547878ea284583eb80217 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1141.049130] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ca5c93701bb4124b9cf6eede265f06f [ 1141.049882] env[62740]: DEBUG nova.network.neutron [-] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1141.050642] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 8236530b50034a188ff00530b60b66ef in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1141.057376] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8236530b50034a188ff00530b60b66ef [ 1141.057795] env[62740]: INFO nova.compute.manager [-] [instance: 697e3884-2ef4-423e-af81-e5d1e94f65a2] Took 0.04 seconds to deallocate network for instance. 
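
[editor's note] The lock "acquired by"/"released by" pairs and the "Waiting for function ... to return." line in the entries above are emitted by oslo.concurrency's lockutils and oslo.service's looping-call helper. A minimal sketch of those two primitives follows; the lock name and worker function are hypothetical stand-ins for illustration, not code from this deployment:

    # Sketch only: reproduces the logging pattern seen above, assuming
    # oslo.concurrency and oslo.service are installed. The lock name and
    # worker function are invented for illustration.
    from oslo_concurrency import lockutils
    from oslo_service import loopingcall

    @lockutils.synchronized('instance-events-example')
    def clear_events_example():
        # While this body runs, lockutils logs the DEBUG pairs
        # '... acquired by ... waited Ns' / '... "released" by ... held Ns'
        # from lockutils.py, exactly as in the entries above.
        pass

    def deallocate_example():
        # Nova drives _deallocate_network_with_retries the same way;
        # raising LoopingCallDone ends the loop on success.
        raise loopingcall.LoopingCallDone()

    clear_events_example()
    timer = loopingcall.FixedIntervalLoopingCall(deallocate_example)
    # The looping-call helper logs 'Waiting for function ... to return.'
    # (loopingcall.py:435 in the entry above) while this runs.
    timer.start(interval=0.5).wait()
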
[ 1141.061094] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5402c50a-9534-4b4b-94d3-7968411f783d tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Expecting reply to msg 02ee0d0cc821441b8e52ae8b765a2f1f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1141.094662] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 48954e35c97547878ea284583eb80217 [ 1141.097757] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 53983733438148549e485568812d5633 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1141.102240] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 02ee0d0cc821441b8e52ae8b765a2f1f [ 1141.105090] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 53983733438148549e485568812d5633 [ 1141.115856] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5402c50a-9534-4b4b-94d3-7968411f783d tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Expecting reply to msg 3bbe45dfa19c4705b00ab4bd2e3f1d7f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1141.156848] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3bbe45dfa19c4705b00ab4bd2e3f1d7f [ 1141.162204] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5402c50a-9534-4b4b-94d3-7968411f783d tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Lock "697e3884-2ef4-423e-af81-e5d1e94f65a2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.193s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1141.162539] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5402c50a-9534-4b4b-94d3-7968411f783d tempest-ServerRescueTestJSONUnderV235-224305580 tempest-ServerRescueTestJSONUnderV235-224305580-project-member] Expecting reply to msg b1c16136f1ec40298ae07aefec15c32c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1141.175706] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b1c16136f1ec40298ae07aefec15c32c [ 1141.304991] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d70acf4-6376-4e16-bc49-631445702462 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.312658] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba3ef038-7ae6-4630-9cc8-7c47b5479021 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.343997] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a789385-0589-498c-a8cf-f83e3af89d8f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.350840] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bb40118-6199-48e5-a85d-66c31b14de04 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1141.363349] env[62740]: DEBUG nova.compute.provider_tree [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1141.363839] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg bb9bc9c6e0bd426b8997279e72e7298f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1141.371515] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bb9bc9c6e0bd426b8997279e72e7298f [ 1141.372582] env[62740]: DEBUG nova.scheduler.client.report [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1141.374871] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 035c6bab3a2f4414827f11f3799fc951 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1141.388044] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 035c6bab3a2f4414827f11f3799fc951 [ 1141.388768] env[62740]: DEBUG oslo_concurrency.lockutils [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.344s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1141.389247] env[62740]: DEBUG nova.compute.manager [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Start building networks asynchronously for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1141.390951] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 054c43bfd6244d1db07a5671c65f9629 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1141.423577] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 054c43bfd6244d1db07a5671c65f9629 [ 1141.425204] env[62740]: DEBUG nova.compute.utils [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1141.425771] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 85b4bf58117349c29cadf9a5cc180334 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1141.426728] env[62740]: DEBUG nova.compute.manager [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1141.426893] env[62740]: DEBUG nova.network.neutron [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1141.434372] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 85b4bf58117349c29cadf9a5cc180334 [ 1141.434968] env[62740]: DEBUG nova.compute.manager [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1141.436745] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 003b9bed9eb044778209d8fec5cccc47 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1141.466933] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 003b9bed9eb044778209d8fec5cccc47 [ 1141.469723] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg f42711623cb64ebfa6acc613e5377323 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1141.498583] env[62740]: DEBUG nova.policy [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd69d6db778f64160881e1dfebfd4ed7f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ce4469c0ef4e4e42bb30cd2f947294f3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 1141.500503] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f42711623cb64ebfa6acc613e5377323 [ 1141.501840] env[62740]: DEBUG nova.compute.manager [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Start spawning the instance on the hypervisor. 
{{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1141.526934] env[62740]: DEBUG nova.virt.hardware [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1141.527184] env[62740]: DEBUG nova.virt.hardware [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1141.527345] env[62740]: DEBUG nova.virt.hardware [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1141.527530] env[62740]: DEBUG nova.virt.hardware [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1141.527678] env[62740]: DEBUG nova.virt.hardware [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1141.527826] env[62740]: DEBUG nova.virt.hardware [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1141.528044] env[62740]: DEBUG nova.virt.hardware [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1141.528211] env[62740]: DEBUG nova.virt.hardware [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1141.528398] env[62740]: DEBUG 
nova.virt.hardware [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1141.528576] env[62740]: DEBUG nova.virt.hardware [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1141.528750] env[62740]: DEBUG nova.virt.hardware [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1141.529604] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2925bd6d-2f0d-4dfb-90ff-480c31c8d26c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.537116] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd92bbdc-9c91-4749-b72a-359f6cff1d20 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.910996] env[62740]: DEBUG nova.network.neutron [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Successfully created port: 981d7484-4944-4cbf-9d44-1e8dc7e506bb {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1142.682746] env[62740]: DEBUG nova.network.neutron [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Successfully updated port: 981d7484-4944-4cbf-9d44-1e8dc7e506bb {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1142.682980] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 10fdfb92befc4059bf4d04904f662d56 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1142.693789] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 10fdfb92befc4059bf4d04904f662d56 [ 1142.694553] env[62740]: DEBUG oslo_concurrency.lockutils [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Acquiring lock "refresh_cache-b0b16f66-8dbc-4e9b-a932-5de45215cfff" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1142.694741] env[62740]: DEBUG oslo_concurrency.lockutils [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Acquired lock "refresh_cache-b0b16f66-8dbc-4e9b-a932-5de45215cfff" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 
1142.695081] env[62740]: DEBUG nova.network.neutron [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1142.695367] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 5e47b59fbf604a64b0834d098fa8303f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1142.704841] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e47b59fbf604a64b0834d098fa8303f [ 1142.751194] env[62740]: DEBUG nova.network.neutron [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1142.932295] env[62740]: DEBUG nova.compute.manager [req-6ab67388-6b40-4da9-ab8c-efed8c8493b2 req-a9b4e7e1-baf7-4874-a908-7f2945181f51 service nova] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Received event network-vif-plugged-981d7484-4944-4cbf-9d44-1e8dc7e506bb {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1142.932522] env[62740]: DEBUG oslo_concurrency.lockutils [req-6ab67388-6b40-4da9-ab8c-efed8c8493b2 req-a9b4e7e1-baf7-4874-a908-7f2945181f51 service nova] Acquiring lock "b0b16f66-8dbc-4e9b-a932-5de45215cfff-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1142.932668] env[62740]: DEBUG oslo_concurrency.lockutils [req-6ab67388-6b40-4da9-ab8c-efed8c8493b2 req-a9b4e7e1-baf7-4874-a908-7f2945181f51 service nova] Lock "b0b16f66-8dbc-4e9b-a932-5de45215cfff-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1142.932818] env[62740]: DEBUG oslo_concurrency.lockutils [req-6ab67388-6b40-4da9-ab8c-efed8c8493b2 req-a9b4e7e1-baf7-4874-a908-7f2945181f51 service nova] Lock "b0b16f66-8dbc-4e9b-a932-5de45215cfff-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1142.932986] env[62740]: DEBUG nova.compute.manager [req-6ab67388-6b40-4da9-ab8c-efed8c8493b2 req-a9b4e7e1-baf7-4874-a908-7f2945181f51 service nova] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] No waiting events found dispatching network-vif-plugged-981d7484-4944-4cbf-9d44-1e8dc7e506bb {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1142.933161] env[62740]: WARNING nova.compute.manager [req-6ab67388-6b40-4da9-ab8c-efed8c8493b2 req-a9b4e7e1-baf7-4874-a908-7f2945181f51 service nova] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Received unexpected event network-vif-plugged-981d7484-4944-4cbf-9d44-1e8dc7e506bb for instance with vm_state building and task_state spawning. 
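
[editor's note] The "Updating instance_info_cache with network_info" entry below embeds the port's full VIF model as JSON. For readers following the cache refresh, here is that same structure trimmed to its key fields, plus a few lines of illustrative (non-Nova) code extracting the values the log keeps repeating: the port UUID, MAC, and fixed IP.

    # The dict below is copied (and trimmed) from the network_info JSON in
    # the surrounding log entries; the loop is illustrative, not a Nova API.
    network_info = [{
        "id": "981d7484-4944-4cbf-9d44-1e8dc7e506bb",   # Neutron port UUID
        "address": "fa:16:3e:91:d1:73",                 # MAC address
        "devname": "tap981d7484-49",
        "type": "ovs",
        "network": {
            "id": "c7681252-9fbe-485e-ab67-59da6e6d7279",
            "bridge": "br-int",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{"address": "192.168.128.5", "type": "fixed",
                         "version": 4}],
            }],
        },
    }]

    for vif in network_info:
        fixed = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"] if ip.get("type") == "fixed"]
        print(vif["id"], vif["address"], fixed)
    # -> 981d7484-... fa:16:3e:91:d1:73 ['192.168.128.5']
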
[ 1142.933321] env[62740]: DEBUG nova.compute.manager [req-6ab67388-6b40-4da9-ab8c-efed8c8493b2 req-a9b4e7e1-baf7-4874-a908-7f2945181f51 service nova] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Received event network-changed-981d7484-4944-4cbf-9d44-1e8dc7e506bb {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1142.933473] env[62740]: DEBUG nova.compute.manager [req-6ab67388-6b40-4da9-ab8c-efed8c8493b2 req-a9b4e7e1-baf7-4874-a908-7f2945181f51 service nova] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Refreshing instance network info cache due to event network-changed-981d7484-4944-4cbf-9d44-1e8dc7e506bb. {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1142.933634] env[62740]: DEBUG oslo_concurrency.lockutils [req-6ab67388-6b40-4da9-ab8c-efed8c8493b2 req-a9b4e7e1-baf7-4874-a908-7f2945181f51 service nova] Acquiring lock "refresh_cache-b0b16f66-8dbc-4e9b-a932-5de45215cfff" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1142.965602] env[62740]: DEBUG nova.network.neutron [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Updating instance_info_cache with network_info: [{"id": "981d7484-4944-4cbf-9d44-1e8dc7e506bb", "address": "fa:16:3e:91:d1:73", "network": {"id": "c7681252-9fbe-485e-ab67-59da6e6d7279", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1927212820-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce4469c0ef4e4e42bb30cd2f947294f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap981d7484-49", "ovs_interfaceid": "981d7484-4944-4cbf-9d44-1e8dc7e506bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1142.966095] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 104f3b581a214960a12c360f50ebff84 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1142.978573] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 104f3b581a214960a12c360f50ebff84 [ 1142.978953] env[62740]: DEBUG oslo_concurrency.lockutils [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Releasing lock "refresh_cache-b0b16f66-8dbc-4e9b-a932-5de45215cfff" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1142.979234] env[62740]: DEBUG nova.compute.manager [None 
req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Instance network_info: |[{"id": "981d7484-4944-4cbf-9d44-1e8dc7e506bb", "address": "fa:16:3e:91:d1:73", "network": {"id": "c7681252-9fbe-485e-ab67-59da6e6d7279", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1927212820-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce4469c0ef4e4e42bb30cd2f947294f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap981d7484-49", "ovs_interfaceid": "981d7484-4944-4cbf-9d44-1e8dc7e506bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1142.979521] env[62740]: DEBUG oslo_concurrency.lockutils [req-6ab67388-6b40-4da9-ab8c-efed8c8493b2 req-a9b4e7e1-baf7-4874-a908-7f2945181f51 service nova] Acquired lock "refresh_cache-b0b16f66-8dbc-4e9b-a932-5de45215cfff" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.979695] env[62740]: DEBUG nova.network.neutron [req-6ab67388-6b40-4da9-ab8c-efed8c8493b2 req-a9b4e7e1-baf7-4874-a908-7f2945181f51 service nova] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Refreshing network info cache for port 981d7484-4944-4cbf-9d44-1e8dc7e506bb {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1142.980084] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-6ab67388-6b40-4da9-ab8c-efed8c8493b2 req-a9b4e7e1-baf7-4874-a908-7f2945181f51 service nova] Expecting reply to msg f8fb9c5c2d7e494886585eab053086a0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1142.981063] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:91:d1:73', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a0a76279-3c11-4bef-b124-2a2ee13fa377', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '981d7484-4944-4cbf-9d44-1e8dc7e506bb', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1142.988305] env[62740]: DEBUG oslo.service.loopingcall [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1142.988937] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f8fb9c5c2d7e494886585eab053086a0 [ 1142.989276] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1142.992028] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f1badd9a-cd42-4fba-a619-9844b0bccbcb {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.011722] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1143.011722] env[62740]: value = "task-640220" [ 1143.011722] env[62740]: _type = "Task" [ 1143.011722] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.019366] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640220, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.521596] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640220, 'name': CreateVM_Task, 'duration_secs': 0.278681} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.521813] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1143.522533] env[62740]: DEBUG oslo_concurrency.lockutils [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1143.522672] env[62740]: DEBUG oslo_concurrency.lockutils [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1143.522942] env[62740]: DEBUG oslo_concurrency.lockutils [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1143.523531] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-631f0850-e94c-4f09-a2af-50314ac7d24d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.528097] env[62740]: DEBUG oslo_vmware.api [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Waiting for the task: (returnval){ [ 1143.528097] env[62740]: value = 
"session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52f0e952-412b-d007-5ce6-d7221f78b55f" [ 1143.528097] env[62740]: _type = "Task" [ 1143.528097] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.535850] env[62740]: DEBUG oslo_vmware.api [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52f0e952-412b-d007-5ce6-d7221f78b55f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.572405] env[62740]: DEBUG nova.network.neutron [req-6ab67388-6b40-4da9-ab8c-efed8c8493b2 req-a9b4e7e1-baf7-4874-a908-7f2945181f51 service nova] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Updated VIF entry in instance network info cache for port 981d7484-4944-4cbf-9d44-1e8dc7e506bb. {{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1143.572771] env[62740]: DEBUG nova.network.neutron [req-6ab67388-6b40-4da9-ab8c-efed8c8493b2 req-a9b4e7e1-baf7-4874-a908-7f2945181f51 service nova] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Updating instance_info_cache with network_info: [{"id": "981d7484-4944-4cbf-9d44-1e8dc7e506bb", "address": "fa:16:3e:91:d1:73", "network": {"id": "c7681252-9fbe-485e-ab67-59da6e6d7279", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1927212820-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce4469c0ef4e4e42bb30cd2f947294f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap981d7484-49", "ovs_interfaceid": "981d7484-4944-4cbf-9d44-1e8dc7e506bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1143.573280] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-6ab67388-6b40-4da9-ab8c-efed8c8493b2 req-a9b4e7e1-baf7-4874-a908-7f2945181f51 service nova] Expecting reply to msg a0d58d6684574c0093b5703cce90ed4a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1143.582293] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a0d58d6684574c0093b5703cce90ed4a [ 1143.582942] env[62740]: DEBUG oslo_concurrency.lockutils [req-6ab67388-6b40-4da9-ab8c-efed8c8493b2 req-a9b4e7e1-baf7-4874-a908-7f2945181f51 service nova] Releasing lock "refresh_cache-b0b16f66-8dbc-4e9b-a932-5de45215cfff" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1144.038752] env[62740]: DEBUG oslo_concurrency.lockutils [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 
tempest-AttachInterfacesTestJSON-805621508-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1144.039075] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1144.039235] env[62740]: DEBUG oslo_concurrency.lockutils [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1151.011400] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8b97929b-3997-4b96-9edb-fffdb2f10b2a tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Expecting reply to msg 3a0e343fd741484b80051387b744f18e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1151.021023] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a0e343fd741484b80051387b744f18e [ 1151.021227] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8b97929b-3997-4b96-9edb-fffdb2f10b2a tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Acquiring lock "e21a5624-20ca-45d8-a0bf-dd87cec1c701" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1169.476653] env[62740]: WARNING oslo_vmware.rw_handles [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1169.476653] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1169.476653] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1169.476653] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1169.476653] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1169.476653] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 1169.476653] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1169.476653] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1169.476653] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1169.476653] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1169.476653] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1169.476653] env[62740]: ERROR oslo_vmware.rw_handles [ 1169.477619] env[62740]: DEBUG 
nova.virt.vmwareapi.images [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/b0737379-5ab7-48ce-bc15-353dd8126301/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1169.479453] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1169.479675] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Copying Virtual Disk [datastore2] vmware_temp/b0737379-5ab7-48ce-bc15-353dd8126301/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore2] vmware_temp/b0737379-5ab7-48ce-bc15-353dd8126301/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1169.479983] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dda7ed2e-31f3-4d5a-a0e5-7d48daffc62d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.490701] env[62740]: DEBUG oslo_vmware.api [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Waiting for the task: (returnval){ [ 1169.490701] env[62740]: value = "task-640221" [ 1169.490701] env[62740]: _type = "Task" [ 1169.490701] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.498241] env[62740]: DEBUG oslo_vmware.api [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Task: {'id': task-640221, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.002686] env[62740]: DEBUG oslo_vmware.exceptions [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Fault InvalidArgument not matched. 
{{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1170.002977] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1170.003556] env[62740]: ERROR nova.compute.manager [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1170.003556] env[62740]: Faults: ['InvalidArgument'] [ 1170.003556] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Traceback (most recent call last): [ 1170.003556] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1170.003556] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] yield resources [ 1170.003556] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1170.003556] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] self.driver.spawn(context, instance, image_meta, [ 1170.003556] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1170.003556] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1170.003556] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1170.003556] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] self._fetch_image_if_missing(context, vi) [ 1170.003556] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1170.003986] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] image_cache(vi, tmp_image_ds_loc) [ 1170.003986] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1170.003986] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] vm_util.copy_virtual_disk( [ 1170.003986] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1170.003986] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] session._wait_for_task(vmdk_copy_task) [ 1170.003986] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1170.003986] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] return self.wait_for_task(task_ref) [ 1170.003986] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1170.003986] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] return evt.wait() [ 1170.003986] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1170.003986] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] result = hub.switch() [ 1170.003986] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1170.003986] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] return self.greenlet.switch() [ 1170.004447] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1170.004447] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] self.f(*self.args, **self.kw) [ 1170.004447] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1170.004447] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] raise exceptions.translate_fault(task_info.error) [ 1170.004447] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1170.004447] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Faults: ['InvalidArgument'] [ 1170.004447] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] [ 1170.004447] env[62740]: INFO nova.compute.manager [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Terminating instance [ 1170.005951] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1170.005951] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1170.005951] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7ca10c0f-3dad-4d7e-ad74-0ee82f48ebcc 
{{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.007976] env[62740]: DEBUG nova.compute.manager [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1170.008184] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1170.008917] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adda99d6-827d-448f-8748-48fd2447d037 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.015491] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1170.015697] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e855520b-1aef-494d-ad6f-fbe5a0b69475 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.017768] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1170.017945] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1170.018920] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a56dcc36-8884-4bce-a765-87c1fa23bed5 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.023556] env[62740]: DEBUG oslo_vmware.api [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Waiting for the task: (returnval){ [ 1170.023556] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]522f4fb7-56cf-8a0f-0a8c-da0e8cb80df5" [ 1170.023556] env[62740]: _type = "Task" [ 1170.023556] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.035418] env[62740]: DEBUG oslo_vmware.api [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]522f4fb7-56cf-8a0f-0a8c-da0e8cb80df5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.088802] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1170.089019] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1170.089210] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Deleting the datastore file [datastore2] 6ca702af-1a5c-40bb-b6c7-2f55ca308c02 {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1170.089484] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-053d3014-f938-44d1-9449-0abd565e877d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.095653] env[62740]: DEBUG oslo_vmware.api [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Waiting for the task: (returnval){ [ 1170.095653] env[62740]: value = "task-640223" [ 1170.095653] env[62740]: _type = "Task" [ 1170.095653] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.103397] env[62740]: DEBUG oslo_vmware.api [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Task: {'id': task-640223, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.534679] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1170.535081] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Creating directory with path [datastore2] vmware_temp/cf3ab7a6-586b-4224-bb19-1f5aef50ad4f/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1170.535195] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f91d02ad-3d9a-4996-a6cf-0b6c24abdf6e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.547787] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Created directory with path [datastore2] vmware_temp/cf3ab7a6-586b-4224-bb19-1f5aef50ad4f/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1170.547986] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Fetch image to [datastore2] vmware_temp/cf3ab7a6-586b-4224-bb19-1f5aef50ad4f/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1170.548176] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/cf3ab7a6-586b-4224-bb19-1f5aef50ad4f/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1170.548993] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59d63175-31af-4f26-8391-650aab721953 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.555600] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47af93b0-bad6-4ed0-bc97-404377d56f23 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.564753] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e03c3e2c-05b9-44ea-89be-ad71d00bfed4 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.594896] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c1a4b86-6a9a-4b4c-9d44-3ffe1576fcc3 {{(pid=62740) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.606122] env[62740]: DEBUG oslo_vmware.api [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Task: {'id': task-640223, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.064146} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.606603] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1170.606799] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1170.606983] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1170.607178] env[62740]: INFO nova.compute.manager [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Took 0.60 seconds to destroy the instance on the hypervisor. 
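The unregister/delete sequence above is the standard oslo.vmware pattern: invoke a *_Task method, then poll the returned Task managed object until vCenter reports success or a fault (the "Waiting for the task" and "progress is 0%" records). A minimal sketch of driving the same pattern directly with oslo.vmware, assuming a reachable vCenter; the host, credentials, and datacenter moref below are placeholders, not values from this log:

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Placeholder connection details (not from this log).
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'admin', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # FileManager.DeleteDatastoreFile_Task returns a Task moref;
    # wait_for_task() polls its TaskInfo until SUCCESS, or raises on a fault.
    dc_ref = vim_util.get_moref('datacenter-2', 'Datacenter')  # placeholder moref
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task',
        session.vim.service_content.fileManager,
        name='[datastore2] 6ca702af-1a5c-40bb-b6c7-2f55ca308c02',
        datacenter=dc_ref)
    session.wait_for_task(task)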
[ 1170.608772] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-48937ce7-2b84-4feb-94e5-26a84565d90e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.610609] env[62740]: DEBUG nova.compute.claims [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1170.610778] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1170.610995] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1170.612986] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 44d9ac66bc3b47c9b6ca7a5142b76f01 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1170.632555] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1170.654625] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 44d9ac66bc3b47c9b6ca7a5142b76f01 [ 1170.686475] env[62740]: DEBUG oslo_vmware.rw_handles [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/cf3ab7a6-586b-4224-bb19-1f5aef50ad4f/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1170.749691] env[62740]: DEBUG oslo_vmware.rw_handles [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Completed reading data from the image iterator. 
{{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1170.749878] env[62740]: DEBUG oslo_vmware.rw_handles [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/cf3ab7a6-586b-4224-bb19-1f5aef50ad4f/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1170.915136] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7466b5d7-fb5b-4d0c-a120-c77520ae2a51 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.922951] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0ddab5f-d610-4cc6-834a-b6d66a144d61 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.952168] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5318222b-3b85-42f5-a4ad-909f0b0199c4 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.959102] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f54040b7-b341-49f1-9e17-6fcdd3fc005c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.972185] env[62740]: DEBUG nova.compute.provider_tree [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1170.972685] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg d2b3149a42ce45da8898a81f1d84e78c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1170.980062] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d2b3149a42ce45da8898a81f1d84e78c [ 1170.980941] env[62740]: DEBUG nova.scheduler.client.report [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1170.983190] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to 
msg 3e86659241714a66b2740d85eb86680a in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1170.995222] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e86659241714a66b2740d85eb86680a
[ 1170.995804] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.385s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1170.996329] env[62740]: ERROR nova.compute.manager [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1170.996329] env[62740]: Faults: ['InvalidArgument']
[ 1170.996329] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Traceback (most recent call last):
[ 1170.996329] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1170.996329] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] self.driver.spawn(context, instance, image_meta,
[ 1170.996329] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1170.996329] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1170.996329] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1170.996329] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] self._fetch_image_if_missing(context, vi)
[ 1170.996329] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1170.996329] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] image_cache(vi, tmp_image_ds_loc)
[ 1170.996329] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1170.996776] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] vm_util.copy_virtual_disk(
[ 1170.996776] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1170.996776] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] session._wait_for_task(vmdk_copy_task)
[ 1170.996776] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1170.996776] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] return self.wait_for_task(task_ref)
[ 1170.996776] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1170.996776] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] return evt.wait()
[ 1170.996776] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1170.996776] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] result = hub.switch()
[ 1170.996776] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1170.996776] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] return self.greenlet.switch()
[ 1170.996776] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1170.996776] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] self.f(*self.args, **self.kw)
[ 1170.997189] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1170.997189] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] raise exceptions.translate_fault(task_info.error)
[ 1170.997189] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1170.997189] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Faults: ['InvalidArgument']
[ 1170.997189] env[62740]: ERROR nova.compute.manager [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02]
[ 1170.997189] env[62740]: DEBUG nova.compute.utils [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1170.998414] env[62740]: DEBUG nova.compute.manager [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Build of instance 6ca702af-1a5c-40bb-b6c7-2f55ca308c02 was re-scheduled: A specified parameter was not correct: fileType
[ 1170.998414] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 1170.998784] env[62740]: DEBUG nova.compute.manager [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 1170.998946] env[62740]: DEBUG nova.compute.manager [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 1170.999134] env[62740]: DEBUG nova.compute.manager [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 1170.999297] env[62740]: DEBUG nova.network.neutron [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1171.315689] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg e47e9830eb7b4b258e452004cb073a96 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1171.328661] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e47e9830eb7b4b258e452004cb073a96
[ 1171.329219] env[62740]: DEBUG nova.network.neutron [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1171.329751] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 79bafc312d8f4a2092c9fab8be579043 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1171.343858] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79bafc312d8f4a2092c9fab8be579043
[ 1171.344502] env[62740]: INFO nova.compute.manager [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Took 0.35 seconds to deallocate network for instance.
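The traceback above shows how that polling loop surfaces failures: _poll_task() raises exceptions.translate_fault(task_info.error), which reaches the compute manager as a VimFaultException carrying the vCenter fault names. A hedged sketch of handling it at a call site, reusing the placeholder session and task from the earlier sketch:

    from oslo_vmware import exceptions as vexc

    try:
        session.wait_for_task(task)
    except vexc.VimFaultException as e:
        # The vCenter fault names are collected in e.fault_list,
        # e.g. ['InvalidArgument'] for the fileType error above.
        if 'InvalidArgument' in e.fault_list:
            print('rejected parameter: %s' % str(e))
        raise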
[ 1171.346447] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 94e0e9a8c62f405b9ad5679882e06789 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1171.380868] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 94e0e9a8c62f405b9ad5679882e06789 [ 1171.384155] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 61d753232e014c069f327eee09b621c2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1171.420026] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 61d753232e014c069f327eee09b621c2 [ 1171.448319] env[62740]: INFO nova.scheduler.client.report [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Deleted allocations for instance 6ca702af-1a5c-40bb-b6c7-2f55ca308c02 [ 1171.454576] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg f23b9b4225ff48238df6c28d678d635b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1171.469074] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f23b9b4225ff48238df6c28d678d635b [ 1171.470027] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2ad257a5-c2c6-4996-806b-f64d34790904 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Lock "6ca702af-1a5c-40bb-b6c7-2f55ca308c02" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 233.146s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1171.470884] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 147f76258d2c4e6ebd90b8bf4aa54f3d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1171.471060] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ef577d0e-57db-474d-a2b7-cf83e187a51b tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Lock "6ca702af-1a5c-40bb-b6c7-2f55ca308c02" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 37.097s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1171.471371] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ef577d0e-57db-474d-a2b7-cf83e187a51b tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Acquiring lock "6ca702af-1a5c-40bb-b6c7-2f55ca308c02-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1171.472023] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ef577d0e-57db-474d-a2b7-cf83e187a51b tempest-AttachVolumeNegativeTest-1769210663 
tempest-AttachVolumeNegativeTest-1769210663-project-member] Lock "6ca702af-1a5c-40bb-b6c7-2f55ca308c02-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1171.472538] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ef577d0e-57db-474d-a2b7-cf83e187a51b tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Lock "6ca702af-1a5c-40bb-b6c7-2f55ca308c02-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1171.474279] env[62740]: INFO nova.compute.manager [None req-ef577d0e-57db-474d-a2b7-cf83e187a51b tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Terminating instance [ 1171.475931] env[62740]: DEBUG nova.compute.manager [None req-ef577d0e-57db-474d-a2b7-cf83e187a51b tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1171.476129] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-ef577d0e-57db-474d-a2b7-cf83e187a51b tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1171.476759] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-522fdb23-1146-4a1a-a3f0-7d37acf949a3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.481395] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 147f76258d2c4e6ebd90b8bf4aa54f3d [ 1171.481897] env[62740]: DEBUG nova.compute.manager [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1171.483817] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 52b77b0142254ae5b8502d84005cca18 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1171.492272] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-880bb2a1-cc07-400d-a8fa-144d7aabd614 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.520176] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-ef577d0e-57db-474d-a2b7-cf83e187a51b tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6ca702af-1a5c-40bb-b6c7-2f55ca308c02 could not be found. 
[ 1171.520406] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-ef577d0e-57db-474d-a2b7-cf83e187a51b tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1171.520587] env[62740]: INFO nova.compute.manager [None req-ef577d0e-57db-474d-a2b7-cf83e187a51b tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1171.520839] env[62740]: DEBUG oslo.service.loopingcall [None req-ef577d0e-57db-474d-a2b7-cf83e187a51b tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1171.522021] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 52b77b0142254ae5b8502d84005cca18 [ 1171.522021] env[62740]: DEBUG nova.compute.manager [-] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1171.522163] env[62740]: DEBUG nova.network.neutron [-] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1171.537597] env[62740]: DEBUG oslo_concurrency.lockutils [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1171.537878] env[62740]: DEBUG oslo_concurrency.lockutils [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1171.541144] env[62740]: INFO nova.compute.claims [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1171.541144] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 45da91dc14a346d4aab8bc87de6be77c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1171.544706] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 6807e951393449b78cf7e98a2c5cd795 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1171.551479] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6807e951393449b78cf7e98a2c5cd795 [ 1171.551829] env[62740]: DEBUG nova.network.neutron [-] [instance: 
6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1171.552214] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 0cde46c67ca3470a80e6fc9944a0e625 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1171.571619] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0cde46c67ca3470a80e6fc9944a0e625 [ 1171.572097] env[62740]: INFO nova.compute.manager [-] [instance: 6ca702af-1a5c-40bb-b6c7-2f55ca308c02] Took 0.05 seconds to deallocate network for instance. [ 1171.576129] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ef577d0e-57db-474d-a2b7-cf83e187a51b tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 54771fd60d994e45aa4a9d53dbc65347 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1171.577674] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 45da91dc14a346d4aab8bc87de6be77c [ 1171.579194] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg a1c133f9eee04232a46784043f439649 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1171.584723] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a1c133f9eee04232a46784043f439649 [ 1171.601577] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 54771fd60d994e45aa4a9d53dbc65347 [ 1171.616609] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ef577d0e-57db-474d-a2b7-cf83e187a51b tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 2e72ac39ced142fb9fa5910a8c751d18 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1171.660441] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e72ac39ced142fb9fa5910a8c751d18 [ 1171.663295] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ef577d0e-57db-474d-a2b7-cf83e187a51b tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Lock "6ca702af-1a5c-40bb-b6c7-2f55ca308c02" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.192s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1171.663625] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ef577d0e-57db-474d-a2b7-cf83e187a51b tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 5a9db5e68e4c48eab11f5f3ab6bef16a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1171.674334] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5a9db5e68e4c48eab11f5f3ab6bef16a [ 1171.782096] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa7916b4-6bae-4c56-a2a9-627f2af9b78b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.789482] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6ed19ed-3a77-4bea-967e-a8f8e818717d {{(pid=62740) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.819478] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-721d1c1a-b39d-4f55-852a-976d76a46586 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.826114] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f62f8c74-f9ca-4280-be48-58da82053570 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.838834] env[62740]: DEBUG nova.compute.provider_tree [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1171.839343] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 0c2d4c152323443d9180995c84bd81ff in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1171.847189] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c2d4c152323443d9180995c84bd81ff [ 1171.848099] env[62740]: DEBUG nova.scheduler.client.report [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1171.850472] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 2bc5a2f3e8b443af885657bcfaca4ec8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1171.862375] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2bc5a2f3e8b443af885657bcfaca4ec8 [ 1171.863215] env[62740]: DEBUG oslo_concurrency.lockutils [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.325s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1171.863691] env[62740]: DEBUG nova.compute.manager [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Start building networks asynchronously for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1171.865470] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 4c03a3c6bcde46c4adadd5fae28e0e01 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1171.897228] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4c03a3c6bcde46c4adadd5fae28e0e01 [ 1171.898893] env[62740]: DEBUG nova.compute.utils [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1171.899495] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 39d61f0753ec419ab85f0035df1b499d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1171.901106] env[62740]: DEBUG nova.compute.manager [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1171.901284] env[62740]: DEBUG nova.network.neutron [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1171.908577] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 39d61f0753ec419ab85f0035df1b499d [ 1171.909076] env[62740]: DEBUG nova.compute.manager [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1171.910654] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg f90522bf4bf2486abee956a6eb7a2d56 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1171.939080] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f90522bf4bf2486abee956a6eb7a2d56 [ 1171.941651] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 8a60e8fa0d9d4c06a2c4f4ed2d881453 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1171.970261] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8a60e8fa0d9d4c06a2c4f4ed2d881453 [ 1171.971394] env[62740]: DEBUG nova.compute.manager [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Start spawning the instance on the hypervisor. {{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1171.999961] env[62740]: DEBUG nova.virt.hardware [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1172.000245] env[62740]: DEBUG nova.virt.hardware [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1172.000360] env[62740]: DEBUG nova.virt.hardware [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1172.000541] env[62740]: DEBUG nova.virt.hardware [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1172.000690] env[62740]: DEBUG nova.virt.hardware [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Image 
pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1172.000840] env[62740]: DEBUG nova.virt.hardware [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1172.001060] env[62740]: DEBUG nova.virt.hardware [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1172.001232] env[62740]: DEBUG nova.virt.hardware [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1172.001403] env[62740]: DEBUG nova.virt.hardware [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1172.001572] env[62740]: DEBUG nova.virt.hardware [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1172.001747] env[62740]: DEBUG nova.virt.hardware [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1172.002583] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a37ac003-b6e4-4374-a53a-f06622d4ebde {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.010597] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-101e0674-e038-4185-a48f-67f7ff3ad209 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.015628] env[62740]: DEBUG nova.policy [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fa549a18cbf84678844e14ddd094d70e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '065d149aea7645d7a5e32c0d14ff0936', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 1172.526609] env[62740]: DEBUG 
nova.network.neutron [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Successfully created port: 73008d44-45fc-44c1-b942-d2f34375cff7 {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1173.514804] env[62740]: DEBUG nova.compute.manager [req-9625275d-9f93-4259-82eb-19653dd75f30 req-922750f7-60c0-4cc2-b30a-9b5d851b12e3 service nova] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Received event network-vif-plugged-73008d44-45fc-44c1-b942-d2f34375cff7 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1173.515092] env[62740]: DEBUG oslo_concurrency.lockutils [req-9625275d-9f93-4259-82eb-19653dd75f30 req-922750f7-60c0-4cc2-b30a-9b5d851b12e3 service nova] Acquiring lock "56106517-e735-4bf5-8d5a-dc0d4aab3991-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1173.515248] env[62740]: DEBUG oslo_concurrency.lockutils [req-9625275d-9f93-4259-82eb-19653dd75f30 req-922750f7-60c0-4cc2-b30a-9b5d851b12e3 service nova] Lock "56106517-e735-4bf5-8d5a-dc0d4aab3991-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1173.515421] env[62740]: DEBUG oslo_concurrency.lockutils [req-9625275d-9f93-4259-82eb-19653dd75f30 req-922750f7-60c0-4cc2-b30a-9b5d851b12e3 service nova] Lock "56106517-e735-4bf5-8d5a-dc0d4aab3991-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1173.515615] env[62740]: DEBUG nova.compute.manager [req-9625275d-9f93-4259-82eb-19653dd75f30 req-922750f7-60c0-4cc2-b30a-9b5d851b12e3 service nova] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] No waiting events found dispatching network-vif-plugged-73008d44-45fc-44c1-b942-d2f34375cff7 {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1173.515793] env[62740]: WARNING nova.compute.manager [req-9625275d-9f93-4259-82eb-19653dd75f30 req-922750f7-60c0-4cc2-b30a-9b5d851b12e3 service nova] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Received unexpected event network-vif-plugged-73008d44-45fc-44c1-b942-d2f34375cff7 for instance with vm_state building and task_state spawning. 
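The "Acquiring lock ... acquired ... released" triplets around the per-instance "-events" lock above come from oslo.concurrency's synchronized decorator, whose inner() wrapper emits the DEBUG lines at lockutils.py:402/407/421. A minimal sketch of the same pattern; the lock name mirrors the log, and the function body is a placeholder:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('56106517-e735-4bf5-8d5a-dc0d4aab3991-events')
    def _pop_event():
        # Critical section: only one greenthread may mutate the
        # per-instance event dict at a time.
        return None

    _pop_event()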
[ 1173.726098] env[62740]: DEBUG nova.network.neutron [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Successfully updated port: 73008d44-45fc-44c1-b942-d2f34375cff7 {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1173.726600] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 2b009fc8b0c94404bbdc897b072d420c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1173.738015] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b009fc8b0c94404bbdc897b072d420c [ 1173.738722] env[62740]: DEBUG oslo_concurrency.lockutils [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquiring lock "refresh_cache-56106517-e735-4bf5-8d5a-dc0d4aab3991" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1173.738885] env[62740]: DEBUG oslo_concurrency.lockutils [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquired lock "refresh_cache-56106517-e735-4bf5-8d5a-dc0d4aab3991" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1173.739065] env[62740]: DEBUG nova.network.neutron [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1173.739468] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 9b78181b0c2d414d8c94f2ae72ad2821 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1173.747638] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9b78181b0c2d414d8c94f2ae72ad2821 [ 1173.999029] env[62740]: DEBUG nova.network.neutron [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1174.342552] env[62740]: DEBUG nova.network.neutron [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Updating instance_info_cache with network_info: [{"id": "73008d44-45fc-44c1-b942-d2f34375cff7", "address": "fa:16:3e:1c:90:37", "network": {"id": "a1bf429f-63e1-4b06-ba31-36e8e686268d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1763096855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "065d149aea7645d7a5e32c0d14ff0936", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73008d44-45", "ovs_interfaceid": "73008d44-45fc-44c1-b942-d2f34375cff7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1174.343062] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 18a65752af224955aeca6de2f007522e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1174.361073] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 18a65752af224955aeca6de2f007522e [ 1174.361696] env[62740]: DEBUG oslo_concurrency.lockutils [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Releasing lock "refresh_cache-56106517-e735-4bf5-8d5a-dc0d4aab3991" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1174.361976] env[62740]: DEBUG nova.compute.manager [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Instance network_info: |[{"id": "73008d44-45fc-44c1-b942-d2f34375cff7", "address": "fa:16:3e:1c:90:37", "network": {"id": "a1bf429f-63e1-4b06-ba31-36e8e686268d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1763096855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "065d149aea7645d7a5e32c0d14ff0936", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73008d44-45", "ovs_interfaceid": "73008d44-45fc-44c1-b942-d2f34375cff7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1174.362380] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1c:90:37', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b399c74-1411-408a-b4cd-84e268ae83fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '73008d44-45fc-44c1-b942-d2f34375cff7', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1174.370018] env[62740]: DEBUG oslo.service.loopingcall [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1174.370353] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1174.370580] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9e49b900-2d58-4362-86f3-8bf324fbfb33 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.391042] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1174.391042] env[62740]: value = "task-640224" [ 1174.391042] env[62740]: _type = "Task" [ 1174.391042] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.902112] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640224, 'name': CreateVM_Task, 'duration_secs': 0.370205} completed successfully. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.902406] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1174.902806] env[62740]: DEBUG oslo_concurrency.lockutils [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1174.902985] env[62740]: DEBUG oslo_concurrency.lockutils [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1174.903393] env[62740]: DEBUG oslo_concurrency.lockutils [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1174.903630] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f08af29-91f1-415f-85bc-571fe76110e9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.908190] env[62740]: DEBUG oslo_vmware.api [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Waiting for the task: (returnval){ [ 1174.908190] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52544230-e2b1-8de8-8ac2-420071298ea8" [ 1174.908190] env[62740]: _type = "Task" [ 1174.908190] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.915956] env[62740]: DEBUG oslo_vmware.api [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52544230-e2b1-8de8-8ac2-420071298ea8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.101321] env[62740]: DEBUG oslo_concurrency.lockutils [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Acquiring lock "2deff09f-d24f-4609-91f2-1585e8407c2a" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1175.101550] env[62740]: DEBUG oslo_concurrency.lockutils [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Lock "2deff09f-d24f-4609-91f2-1585e8407c2a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1175.418735] env[62740]: DEBUG oslo_concurrency.lockutils [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1175.419051] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1175.419295] env[62740]: DEBUG oslo_concurrency.lockutils [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1175.539849] env[62740]: DEBUG nova.compute.manager [req-3d9b9659-2ef1-40df-9b2b-090cd264a702 req-61ace415-ee9e-4e57-a4aa-cf3bfbe26043 service nova] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Received event network-changed-73008d44-45fc-44c1-b942-d2f34375cff7 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1175.540065] env[62740]: DEBUG nova.compute.manager [req-3d9b9659-2ef1-40df-9b2b-090cd264a702 req-61ace415-ee9e-4e57-a4aa-cf3bfbe26043 service nova] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Refreshing instance network info cache due to event network-changed-73008d44-45fc-44c1-b942-d2f34375cff7.
{{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1175.540283] env[62740]: DEBUG oslo_concurrency.lockutils [req-3d9b9659-2ef1-40df-9b2b-090cd264a702 req-61ace415-ee9e-4e57-a4aa-cf3bfbe26043 service nova] Acquiring lock "refresh_cache-56106517-e735-4bf5-8d5a-dc0d4aab3991" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1175.540427] env[62740]: DEBUG oslo_concurrency.lockutils [req-3d9b9659-2ef1-40df-9b2b-090cd264a702 req-61ace415-ee9e-4e57-a4aa-cf3bfbe26043 service nova] Acquired lock "refresh_cache-56106517-e735-4bf5-8d5a-dc0d4aab3991" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1175.540609] env[62740]: DEBUG nova.network.neutron [req-3d9b9659-2ef1-40df-9b2b-090cd264a702 req-61ace415-ee9e-4e57-a4aa-cf3bfbe26043 service nova] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Refreshing network info cache for port 73008d44-45fc-44c1-b942-d2f34375cff7 {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1175.541116] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-3d9b9659-2ef1-40df-9b2b-090cd264a702 req-61ace415-ee9e-4e57-a4aa-cf3bfbe26043 service nova] Expecting reply to msg 87a9897baa0748f68278352f700984b0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1175.548786] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 87a9897baa0748f68278352f700984b0 [ 1175.855544] env[62740]: DEBUG nova.network.neutron [req-3d9b9659-2ef1-40df-9b2b-090cd264a702 req-61ace415-ee9e-4e57-a4aa-cf3bfbe26043 service nova] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Updated VIF entry in instance network info cache for port 73008d44-45fc-44c1-b942-d2f34375cff7. 
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1175.855938] env[62740]: DEBUG nova.network.neutron [req-3d9b9659-2ef1-40df-9b2b-090cd264a702 req-61ace415-ee9e-4e57-a4aa-cf3bfbe26043 service nova] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Updating instance_info_cache with network_info: [{"id": "73008d44-45fc-44c1-b942-d2f34375cff7", "address": "fa:16:3e:1c:90:37", "network": {"id": "a1bf429f-63e1-4b06-ba31-36e8e686268d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1763096855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "065d149aea7645d7a5e32c0d14ff0936", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73008d44-45", "ovs_interfaceid": "73008d44-45fc-44c1-b942-d2f34375cff7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1175.856546] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-3d9b9659-2ef1-40df-9b2b-090cd264a702 req-61ace415-ee9e-4e57-a4aa-cf3bfbe26043 service nova] Expecting reply to msg 0de7dc6957c84abd802ee18076de37c9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1175.866793] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0de7dc6957c84abd802ee18076de37c9 [ 1175.867405] env[62740]: DEBUG oslo_concurrency.lockutils [req-3d9b9659-2ef1-40df-9b2b-090cd264a702 req-61ace415-ee9e-4e57-a4aa-cf3bfbe26043 service nova] Releasing lock "refresh_cache-56106517-e735-4bf5-8d5a-dc0d4aab3991" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1182.214041] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._sync_power_states {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1182.214358] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 1cf4326094254396b5feebcce6e1693c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1182.235029] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1cf4326094254396b5feebcce6e1693c [ 1182.237292] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Getting list of instances from cluster (obj){ [ 1182.237292] env[62740]: value = "domain-c8" [ 1182.237292] env[62740]: _type = "ClusterComputeResource" [ 1182.237292] env[62740]: } {{(pid=62740) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1182.238921] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80f846e3-6740-46ed-8aa7-33935e79fe89 
{{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.255575] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Got total of 10 instances {{(pid=62740) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1182.255750] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Triggering sync for uuid 732da1c8-e83e-4dd7-96c2-dbfa9468baab {{(pid=62740) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 1182.255943] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Triggering sync for uuid 388a39df-9fa9-4153-9f3c-4ad94fd5edfb {{(pid=62740) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 1182.256119] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Triggering sync for uuid 5f57389d-853e-4439-872a-8345664578d0 {{(pid=62740) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 1182.256279] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Triggering sync for uuid 472cd209-4192-4473-b788-d1ea342653bf {{(pid=62740) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 1182.256436] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Triggering sync for uuid d8dac9af-0897-4fbf-8ee6-1fb3955d48c0 {{(pid=62740) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 1182.256588] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Triggering sync for uuid d6c3ca16-5c7c-41e6-9850-10221603ad2a {{(pid=62740) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 1182.256741] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Triggering sync for uuid 158406db-7196-4826-aefa-20a58daa186b {{(pid=62740) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 1182.256889] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Triggering sync for uuid e21a5624-20ca-45d8-a0bf-dd87cec1c701 {{(pid=62740) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 1182.257103] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Triggering sync for uuid b0b16f66-8dbc-4e9b-a932-5de45215cfff {{(pid=62740) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 1182.257288] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Triggering sync for uuid 56106517-e735-4bf5-8d5a-dc0d4aab3991 {{(pid=62740) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 1182.257696] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "732da1c8-e83e-4dd7-96c2-dbfa9468baab" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1182.257938] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "388a39df-9fa9-4153-9f3c-4ad94fd5edfb" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62740)
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1182.258164] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "5f57389d-853e-4439-872a-8345664578d0" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1182.258391] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "472cd209-4192-4473-b788-d1ea342653bf" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1182.258586] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "d8dac9af-0897-4fbf-8ee6-1fb3955d48c0" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1182.258778] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "d6c3ca16-5c7c-41e6-9850-10221603ad2a" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1182.258968] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "158406db-7196-4826-aefa-20a58daa186b" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1182.259168] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "e21a5624-20ca-45d8-a0bf-dd87cec1c701" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1182.259374] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "b0b16f66-8dbc-4e9b-a932-5de45215cfff" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1182.259579] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "56106517-e735-4bf5-8d5a-dc0d4aab3991" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1186.457665] env[62740]: WARNING oslo_vmware.rw_handles [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1186.457665] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most
recent call last): [ 1186.457665] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1186.457665] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1186.457665] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1186.457665] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 1186.457665] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1186.457665] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1186.457665] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1186.457665] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1186.457665] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1186.457665] env[62740]: ERROR oslo_vmware.rw_handles [ 1186.458419] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/bff31539-5c5f-4bb3-a925-ecfcc8080ac5/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore1 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1186.460443] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1186.460717] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Copying Virtual Disk [datastore1] vmware_temp/bff31539-5c5f-4bb3-a925-ecfcc8080ac5/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore1] vmware_temp/bff31539-5c5f-4bb3-a925-ecfcc8080ac5/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1186.461029] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fcd66320-90df-469b-9dd2-3e064022db85 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.469197] env[62740]: DEBUG oslo_vmware.api [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Waiting for the task: (returnval){ [ 1186.469197] env[62740]: value = "task-640225" [ 1186.469197] env[62740]: _type = "Task" [ 1186.469197] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.481034] env[62740]: DEBUG oslo_vmware.api [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Task: {'id': task-640225, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.980335] env[62740]: DEBUG oslo_vmware.exceptions [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Fault InvalidArgument not matched. {{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1186.980626] env[62740]: DEBUG oslo_concurrency.lockutils [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Releasing lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1186.981196] env[62740]: ERROR nova.compute.manager [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1186.981196] env[62740]: Faults: ['InvalidArgument'] [ 1186.981196] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Traceback (most recent call last): [ 1186.981196] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1186.981196] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] yield resources [ 1186.981196] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1186.981196] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] self.driver.spawn(context, instance, image_meta, [ 1186.981196] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1186.981196] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1186.981196] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1186.981196] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] self._fetch_image_if_missing(context, vi) [ 1186.981196] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1186.981664] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] image_cache(vi, tmp_image_ds_loc) [ 1186.981664] env[62740]: ERROR 
nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1186.981664] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] vm_util.copy_virtual_disk( [ 1186.981664] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1186.981664] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] session._wait_for_task(vmdk_copy_task) [ 1186.981664] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1186.981664] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] return self.wait_for_task(task_ref) [ 1186.981664] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1186.981664] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] return evt.wait() [ 1186.981664] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1186.981664] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] result = hub.switch() [ 1186.981664] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1186.981664] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] return self.greenlet.switch() [ 1186.982206] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1186.982206] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] self.f(*self.args, **self.kw) [ 1186.982206] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1186.982206] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] raise exceptions.translate_fault(task_info.error) [ 1186.982206] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1186.982206] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Faults: ['InvalidArgument'] [ 1186.982206] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] [ 1186.982206] env[62740]: INFO nova.compute.manager [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Terminating instance [ 1186.983063] env[62740]: DEBUG oslo_concurrency.lockutils [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 
tempest-DeleteServersAdminTestJSON-1607895434-project-member] Acquired lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1186.983274] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1186.983542] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b6cd99f6-6d5e-4a90-a2a7-28837f90051b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.985747] env[62740]: DEBUG nova.compute.manager [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1186.985939] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1186.986962] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d9eebdb-3865-414c-9efe-60506adf8c90 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.993638] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1186.993860] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-042f1302-81b4-4b70-9922-07d00b502a91 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.996085] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1186.996262] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1186.997248] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a275b071-4ac5-4378-8f7a-663948088d09 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.001969] env[62740]: DEBUG oslo_vmware.api [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Waiting for the task: (returnval){ [ 1187.001969] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52506e78-b34c-8a97-b623-0441b67c7394" [ 1187.001969] env[62740]: _type = "Task" [ 1187.001969] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.009493] env[62740]: DEBUG oslo_vmware.api [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52506e78-b34c-8a97-b623-0441b67c7394, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.061717] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1187.062193] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Deleting contents of the VM from datastore datastore1 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1187.062555] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Deleting the datastore file [datastore1] 732da1c8-e83e-4dd7-96c2-dbfa9468baab {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1187.062964] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-57c2e868-f8a9-4ce0-b6aa-81b9277a3b7d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.069601] env[62740]: DEBUG oslo_vmware.api [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Waiting for the task: (returnval){ [ 1187.069601] env[62740]: value = "task-640227" [ 1187.069601] env[62740]: _type = "Task" [ 1187.069601] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.077388] env[62740]: DEBUG oslo_vmware.api [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Task: {'id': task-640227, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.514945] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1187.515318] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Creating directory with path [datastore1] vmware_temp/4e7c5007-f922-4262-8fce-471eace49be3/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1187.515513] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a75f392c-5d60-46aa-a4f3-b39f142a31c4 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.528449] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Created directory with path [datastore1] vmware_temp/4e7c5007-f922-4262-8fce-471eace49be3/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1187.528686] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Fetch image to [datastore1] vmware_temp/4e7c5007-f922-4262-8fce-471eace49be3/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1187.528880] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore1] vmware_temp/4e7c5007-f922-4262-8fce-471eace49be3/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore1 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1187.529715] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d717ed3-0ea6-4cf4-b2a0-44f49c43cc77 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.536885] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8410443e-0d89-4e0b-877f-a6de4401da72 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1187.546532] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea5fe5ba-ca1e-4b1f-b94a-a068a845646b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.581747] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22401138-e0e0-49ad-a6fa-642c7b7363ef {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.591696] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-73644154-9df7-4a69-8cd8-1080187a9acf {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.593533] env[62740]: DEBUG oslo_vmware.api [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Task: {'id': task-640227, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07244} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.593771] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1187.593953] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Deleted contents of the VM from datastore datastore1 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1187.594151] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1187.594309] env[62740]: INFO nova.compute.manager [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Took 0.61 seconds to destroy the instance on the hypervisor. 
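
The records above show oslo.vmware's task pattern end to end: each *_Task invocation (CreateVM_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task) returns a Task moref immediately, wait_for_task/_poll_task then polls it ("progress is 0%" ... "completed successfully"), and on task error the fault is translated and raised, which is where the "A specified parameter was not correct: fileType" traceback above originates (note "Fault InvalidArgument not matched.", so the generic VimFaultException is what callers see). Below is a minimal sketch of the same pattern against oslo.vmware's public API; the vCenter endpoint, credentials, file paths, and dc_ref are placeholders, not values from this deployment.

from oslo_vmware import api, exceptions

# Placeholder endpoint/credentials; task_poll_interval controls how often
# _poll_task wakes up and logs "progress is N%".
session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)

def copy_virtual_disk(src_path, dst_path, dc_ref):
    disk_mgr = session.vim.service_content.virtualDiskManager
    # The *_Task call returns at once with a Task moref (e.g. "task-640225").
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                              sourceName=src_path, sourceDatacenter=dc_ref,
                              destName=dst_path, destDatacenter=dc_ref)
    try:
        # Blocks while polling task.info -- the loop behind the
        # "Waiting for the task ... to complete" records above.
        return session.wait_for_task(task)
    except exceptions.VimFaultException as e:
        # On task error the fault is translated and raised; the fileType
        # failure above lands here with 'InvalidArgument' in e.fault_list.
        print('CopyVirtualDisk_Task failed, faults: %s' % e.fault_list)
        raise
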
[ 1187.596621] env[62740]: DEBUG nova.compute.claims [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1187.596712] env[62740]: DEBUG oslo_concurrency.lockutils [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1187.596922] env[62740]: DEBUG oslo_concurrency.lockutils [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1187.598921] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Expecting reply to msg 8ebdf2b6453c4bc483fc1f1296060a11 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1187.615736] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore1 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1187.642381] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8ebdf2b6453c4bc483fc1f1296060a11 [ 1187.815924] env[62740]: DEBUG oslo_vmware.rw_handles [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4e7c5007-f922-4262-8fce-471eace49be3/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1187.886385] env[62740]: DEBUG oslo_vmware.rw_handles [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Completed reading data from the image iterator. {{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1187.886569] env[62740]: DEBUG oslo_vmware.rw_handles [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4e7c5007-f922-4262-8fce-471eace49be3/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1187.936044] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1187.941730] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4f4bd2d-d400-4457-bdbe-40174834a06d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.949439] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d35bccbe-e1d2-4253-ac2e-988fb3f4e1c7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.979084] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dec732c9-ed5e-4584-89b8-9acd4e4e0f9b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.986114] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca413cb1-780b-47b1-815d-5b11cc4e56fc {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.999151] env[62740]: DEBUG nova.compute.provider_tree [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1187.999669] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Expecting reply to msg ce6d3a0cafd94abdb4c143621c8f8a91 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1188.007367] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ce6d3a0cafd94abdb4c143621c8f8a91 [ 1188.008336] env[62740]: DEBUG nova.scheduler.client.report [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1188.010696] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Expecting reply to msg 8c0d6b2be5014ccc84d2069d5c75a699 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1188.022921] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for 
msg 8c0d6b2be5014ccc84d2069d5c75a699 [ 1188.023624] env[62740]: DEBUG oslo_concurrency.lockutils [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.427s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1188.024159] env[62740]: ERROR nova.compute.manager [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1188.024159] env[62740]: Faults: ['InvalidArgument'] [ 1188.024159] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Traceback (most recent call last): [ 1188.024159] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1188.024159] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] self.driver.spawn(context, instance, image_meta, [ 1188.024159] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1188.024159] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1188.024159] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1188.024159] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] self._fetch_image_if_missing(context, vi) [ 1188.024159] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1188.024159] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] image_cache(vi, tmp_image_ds_loc) [ 1188.024159] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1188.024573] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] vm_util.copy_virtual_disk( [ 1188.024573] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1188.024573] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] session._wait_for_task(vmdk_copy_task) [ 1188.024573] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1188.024573] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] return self.wait_for_task(task_ref) [ 1188.024573] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1188.024573] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] return evt.wait() [ 1188.024573] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1188.024573] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] result = hub.switch() [ 1188.024573] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1188.024573] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] return self.greenlet.switch() [ 1188.024573] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1188.024573] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] self.f(*self.args, **self.kw) [ 1188.024979] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1188.024979] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] raise exceptions.translate_fault(task_info.error) [ 1188.024979] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1188.024979] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Faults: ['InvalidArgument'] [ 1188.024979] env[62740]: ERROR nova.compute.manager [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] [ 1188.024979] env[62740]: DEBUG nova.compute.utils [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1188.026786] env[62740]: DEBUG nova.compute.manager [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Build of instance 732da1c8-e83e-4dd7-96c2-dbfa9468baab was re-scheduled: A specified parameter was not correct: fileType [ 1188.026786] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1188.027049] env[62740]: DEBUG nova.compute.manager [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1188.027189] env[62740]: DEBUG nova.compute.manager [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Virt driver does not provide unplug_vifs method, so it 
is not possible to determine if VIFs should be unplugged. {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1188.027366] env[62740]: DEBUG nova.compute.manager [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1188.027532] env[62740]: DEBUG nova.network.neutron [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1188.408677] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Expecting reply to msg 6fe8af7042884e96af5cb2b89807d242 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1188.424014] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6fe8af7042884e96af5cb2b89807d242 [ 1188.424630] env[62740]: DEBUG nova.network.neutron [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1188.425151] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Expecting reply to msg de31622f36d54d7aa8edddca923dfd1b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1188.438357] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg de31622f36d54d7aa8edddca923dfd1b [ 1188.438357] env[62740]: INFO nova.compute.manager [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Took 0.41 seconds to deallocate network for instance.
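The traceback above ends inside oslo.vmware's task polling: wait_for_task parks the caller on an eventlet Event while a looping call polls the vCenter task, and once the task reports an error state the fault is translated into a VimFaultException ("Faults: ['InvalidArgument']"), which the build path then turns into a re-schedule. A minimal sketch of that polling pattern follows; fetch_task_info() is a hypothetical callback standing in for the real PropertyCollector query, and the inline sleep replaces the looping-call/Event machinery.

import time

class VimFaultException(Exception):
    # Stand-in for oslo_vmware.exceptions.VimFaultException (assumption:
    # simplified to carry just the fault list and message).
    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list

def wait_for_task(fetch_task_info, interval=0.5):
    # Poll until the task leaves its running state, raising on error,
    # roughly what oslo_vmware.api._poll_task does at api.py:448.
    while True:
        info = fetch_task_info()  # hypothetical callback
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            raise VimFaultException(info['faults'], info['message'])
        time.sleep(interval)

# Example: a task that fails the way the build above did.
states = iter([
    {'state': 'running'},
    {'state': 'error', 'faults': ['InvalidArgument'],
     'message': 'A specified parameter was not correct: fileType'},
])
try:
    wait_for_task(lambda: next(states))
except VimFaultException as exc:
    print(exc.fault_list, exc)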
[ 1188.439645] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Expecting reply to msg a2d928b4d2a34aa0aad8475c917617a6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1188.495494] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a2d928b4d2a34aa0aad8475c917617a6 [ 1188.497878] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Expecting reply to msg 6974b41bde51494abef1fc1aca0ea4cc in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1188.540441] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6974b41bde51494abef1fc1aca0ea4cc [ 1188.577361] env[62740]: INFO nova.scheduler.client.report [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Deleted allocations for instance 732da1c8-e83e-4dd7-96c2-dbfa9468baab [ 1188.589417] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Expecting reply to msg fb424c5b0ae24534b2abeb570a804f57 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1188.605032] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fb424c5b0ae24534b2abeb570a804f57 [ 1188.605632] env[62740]: DEBUG oslo_concurrency.lockutils [None req-542d9f80-375f-4857-81ab-33f313506dfb tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Lock "732da1c8-e83e-4dd7-96c2-dbfa9468baab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 395.626s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1188.606195] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Expecting reply to msg 440326b82ff3402a9133f0622ca67e65 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1188.606962] env[62740]: DEBUG oslo_concurrency.lockutils [None req-da5d0e5f-4cb5-4fdf-82de-6e3a122bb293 tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Lock "732da1c8-e83e-4dd7-96c2-dbfa9468baab" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 199.308s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1188.607198] env[62740]: DEBUG oslo_concurrency.lockutils [None req-da5d0e5f-4cb5-4fdf-82de-6e3a122bb293 tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Acquiring lock "732da1c8-e83e-4dd7-96c2-dbfa9468baab-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1188.607408] env[62740]: DEBUG oslo_concurrency.lockutils [None 
req-da5d0e5f-4cb5-4fdf-82de-6e3a122bb293 tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Lock "732da1c8-e83e-4dd7-96c2-dbfa9468baab-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1188.607572] env[62740]: DEBUG oslo_concurrency.lockutils [None req-da5d0e5f-4cb5-4fdf-82de-6e3a122bb293 tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Lock "732da1c8-e83e-4dd7-96c2-dbfa9468baab-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1188.609809] env[62740]: INFO nova.compute.manager [None req-da5d0e5f-4cb5-4fdf-82de-6e3a122bb293 tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Terminating instance [ 1188.611881] env[62740]: DEBUG nova.compute.manager [None req-da5d0e5f-4cb5-4fdf-82de-6e3a122bb293 tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1188.612536] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-da5d0e5f-4cb5-4fdf-82de-6e3a122bb293 tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1188.613458] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4f18c935-6948-42c3-972c-e70142fe29f7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.616194] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 440326b82ff3402a9133f0622ca67e65 [ 1188.616742] env[62740]: DEBUG nova.compute.manager [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Starting instance... 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1188.618649] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Expecting reply to msg bb75d57f67414e07a7b99d674eb4ae9b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1188.626890] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-312aebe1-394f-4a69-9e2f-1267c3297ec3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.656242] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-da5d0e5f-4cb5-4fdf-82de-6e3a122bb293 tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 732da1c8-e83e-4dd7-96c2-dbfa9468baab could not be found. [ 1188.656494] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-da5d0e5f-4cb5-4fdf-82de-6e3a122bb293 tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1188.656733] env[62740]: INFO nova.compute.manager [None req-da5d0e5f-4cb5-4fdf-82de-6e3a122bb293 tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1188.657441] env[62740]: DEBUG oslo.service.loopingcall [None req-da5d0e5f-4cb5-4fdf-82de-6e3a122bb293 tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1188.657441] env[62740]: DEBUG nova.compute.manager [-] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1188.657441] env[62740]: DEBUG nova.network.neutron [-] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1188.668032] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bb75d57f67414e07a7b99d674eb4ae9b [ 1188.678864] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 41419adb5b084e45b3847069bcb6832b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1188.686298] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1188.686536] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1188.688464] env[62740]: INFO nova.compute.claims [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1188.690167] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Expecting reply to msg b83c4ea8360544828a5db52521cde2e2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1188.691831] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 41419adb5b084e45b3847069bcb6832b [ 1188.692317] env[62740]: DEBUG nova.network.neutron [-] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1188.692645] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg d4c1c579af9647a6bf51db44a4263a8b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1188.703457] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d4c1c579af9647a6bf51db44a4263a8b [ 1188.703929] env[62740]: INFO nova.compute.manager [-] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] Took 0.05 seconds to deallocate network for instance. 
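The "Waiting for function ... to return." lines above come from oslo.service's looping-call helper: the wrapped function is invoked on a fixed interval until it raises LoopingCallDone, and the caller blocks on the event returned by start(). A simplified sketch of driving a deallocate-with-retries wrapper that way; the retry accounting and the callable are illustrative, not Nova's exact code.

from oslo_service import loopingcall

def deallocate_network_with_retries(deallocate, max_attempts=3):
    attempt = {'n': 0}

    def _deallocate_network_with_retries():
        attempt['n'] += 1
        try:
            deallocate()
        except Exception:
            if attempt['n'] >= max_attempts:
                raise  # give up; wait() below re-raises
            return     # let the looping call fire again
        # Stop the loop and hand a value back to .wait().
        raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(
        _deallocate_network_with_retries)
    # This is the point at which "Waiting for function ... to
    # return." is logged by oslo.service.
    return timer.start(interval=1.0).wait()

# Hypothetical deallocate callable; Nova's real one talks to Neutron.
print(deallocate_network_with_retries(lambda: None))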
[ 1188.708597] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-da5d0e5f-4cb5-4fdf-82de-6e3a122bb293 tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Expecting reply to msg 60bfbfd914dd48368d9ff656a2b98457 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1188.743728] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b83c4ea8360544828a5db52521cde2e2 [ 1188.745497] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Expecting reply to msg 62681d337230475db486dece5305ab11 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1188.753052] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 60bfbfd914dd48368d9ff656a2b98457 [ 1188.753525] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 62681d337230475db486dece5305ab11 [ 1188.778644] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-da5d0e5f-4cb5-4fdf-82de-6e3a122bb293 tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Expecting reply to msg 53e587fe990b4570a1c518f55c018df8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1188.833557] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 53e587fe990b4570a1c518f55c018df8 [ 1188.839247] env[62740]: DEBUG oslo_concurrency.lockutils [None req-da5d0e5f-4cb5-4fdf-82de-6e3a122bb293 tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Lock "732da1c8-e83e-4dd7-96c2-dbfa9468baab" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.232s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1188.839640] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-da5d0e5f-4cb5-4fdf-82de-6e3a122bb293 tempest-ImagesOneServerNegativeTestJSON-44492265 tempest-ImagesOneServerNegativeTestJSON-44492265-project-member] Expecting reply to msg c6312a97c25d40539f851cb6f555d24e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1188.844395] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "732da1c8-e83e-4dd7-96c2-dbfa9468baab" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 6.587s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1188.844594] env[62740]: INFO nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 732da1c8-e83e-4dd7-96c2-dbfa9468baab] During sync_power_state the instance has a pending task (deleting). Skip. 
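The acquired/waited and released/held pairs throughout this log (e.g. the terminate lock held 0.232s, the _sync_power_states lock waited on for 6.587s) are emitted by oslo.concurrency's lock helpers, which serialize entry points such as do_terminate_instance on a per-instance lock name. A minimal sketch of the same pattern, reusing the instance UUID from the log; with debug logging enabled these calls produce exactly the lockutils.py:402/407/421 lines seen above.

from oslo_concurrency import lockutils

INSTANCE_UUID = '732da1c8-e83e-4dd7-96c2-dbfa9468baab'  # from the log

# Decorator form: one critical section per lock name, analogous to how
# Nova guards do_terminate_instance.
@lockutils.synchronized(INSTANCE_UUID)
def do_terminate_instance():
    print('terminating under lock %s' % INSTANCE_UUID)

# Context-manager form, as used for names like "refresh_cache-<uuid>".
def refresh_cache():
    with lockutils.lock('refresh_cache-%s' % INSTANCE_UUID):
        print('cache refresh serialized')

do_terminate_instance()
refresh_cache()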
[ 1188.844771] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "732da1c8-e83e-4dd7-96c2-dbfa9468baab" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1188.855167] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c6312a97c25d40539f851cb6f555d24e [ 1188.888471] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1188.890451] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1189.027334] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e5da25d-ebd5-4777-b70a-76e54fe34759 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.035015] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-381d3bbd-6aa5-40c1-93eb-1b165e771a91 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.070767] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a4942c6-b6d2-491d-a128-6abff0769f52 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.078764] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78b4c166-e983-47c0-ba70-d47aff83b53a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.092885] env[62740]: DEBUG nova.compute.provider_tree [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1189.093242] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Expecting reply to msg 476c51270e48444a9f81a9e4b3894e2a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1189.104042] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 476c51270e48444a9f81a9e4b3894e2a [ 1189.105072] env[62740]: DEBUG nova.scheduler.client.report [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 
65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1189.107646] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Expecting reply to msg 7052ad4f1df6475eb5f6c386609f5a67 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1189.119187] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7052ad4f1df6475eb5f6c386609f5a67 [ 1189.119896] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.433s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1189.120372] env[62740]: DEBUG nova.compute.manager [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Start building networks asynchronously for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1189.122026] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Expecting reply to msg a4d7600bc89f42b7a950564a9ab242f6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1189.159068] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a4d7600bc89f42b7a950564a9ab242f6 [ 1189.160890] env[62740]: DEBUG nova.compute.utils [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1189.161511] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Expecting reply to msg 5127605797d141059b037927f0a20e24 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1189.163054] env[62740]: DEBUG nova.compute.manager [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Allocating IP information in the background. 
{{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1189.163240] env[62740]: DEBUG nova.network.neutron [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1189.177873] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5127605797d141059b037927f0a20e24 [ 1189.178478] env[62740]: DEBUG nova.compute.manager [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Start building block device mappings for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1189.180084] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Expecting reply to msg fdb5c51d75af455fac19b018174eeac4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1189.222962] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fdb5c51d75af455fac19b018174eeac4 [ 1189.228377] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Expecting reply to msg 366644edc63747a6a1d418164771ef49 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1189.249500] env[62740]: DEBUG nova.policy [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ce454271a7f74c61a23d85d6ef09003f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6bc583a8c2f9496a986a94b9eca2bc9b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 1189.263487] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 366644edc63747a6a1d418164771ef49 [ 1189.264647] env[62740]: DEBUG nova.compute.manager [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Start spawning the instance on the hypervisor. 
{{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1189.299384] env[62740]: DEBUG nova.virt.hardware [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1189.299628] env[62740]: DEBUG nova.virt.hardware [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1189.299800] env[62740]: DEBUG nova.virt.hardware [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1189.300153] env[62740]: DEBUG nova.virt.hardware [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1189.300343] env[62740]: DEBUG nova.virt.hardware [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1189.300501] env[62740]: DEBUG nova.virt.hardware [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1189.300719] env[62740]: DEBUG nova.virt.hardware [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1189.300885] env[62740]: DEBUG nova.virt.hardware [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1189.301066] env[62740]: DEBUG 
nova.virt.hardware [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1189.301241] env[62740]: DEBUG nova.virt.hardware [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1189.301451] env[62740]: DEBUG nova.virt.hardware [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1189.302704] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a17d51bd-b51b-43f2-bac9-25ec1e4daef6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.312235] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4962f27-7aee-426b-8c84-7725862147ec {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.655313] env[62740]: DEBUG nova.network.neutron [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Successfully created port: 342c902c-1b43-4e8a-bfff-2345c99100ea {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1190.666754] env[62740]: DEBUG nova.network.neutron [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Successfully updated port: 342c902c-1b43-4e8a-bfff-2345c99100ea {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1190.667281] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Expecting reply to msg 016f14d01ab54ef2a32f93df46ee67dc in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1190.678869] env[62740]: DEBUG nova.compute.manager [req-358bfe7d-13fd-42c3-9158-de760034bcb9 req-5734e213-dc3d-406f-adf4-d65fbe08b51f service nova] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Received event network-vif-plugged-342c902c-1b43-4e8a-bfff-2345c99100ea {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1190.678919] env[62740]: DEBUG oslo_concurrency.lockutils [req-358bfe7d-13fd-42c3-9158-de760034bcb9 req-5734e213-dc3d-406f-adf4-d65fbe08b51f service nova] Acquiring lock "43e4ddf4-230e-49f7-975f-ba99a6da9398-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1190.679483] env[62740]: DEBUG oslo_concurrency.lockutils [req-358bfe7d-13fd-42c3-9158-de760034bcb9 req-5734e213-dc3d-406f-adf4-d65fbe08b51f 
service nova] Lock "43e4ddf4-230e-49f7-975f-ba99a6da9398-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1190.679483] env[62740]: DEBUG oslo_concurrency.lockutils [req-358bfe7d-13fd-42c3-9158-de760034bcb9 req-5734e213-dc3d-406f-adf4-d65fbe08b51f service nova] Lock "43e4ddf4-230e-49f7-975f-ba99a6da9398-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1190.679483] env[62740]: DEBUG nova.compute.manager [req-358bfe7d-13fd-42c3-9158-de760034bcb9 req-5734e213-dc3d-406f-adf4-d65fbe08b51f service nova] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] No waiting events found dispatching network-vif-plugged-342c902c-1b43-4e8a-bfff-2345c99100ea {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1190.679626] env[62740]: WARNING nova.compute.manager [req-358bfe7d-13fd-42c3-9158-de760034bcb9 req-5734e213-dc3d-406f-adf4-d65fbe08b51f service nova] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Received unexpected event network-vif-plugged-342c902c-1b43-4e8a-bfff-2345c99100ea for instance with vm_state building and task_state spawning. [ 1190.689369] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 016f14d01ab54ef2a32f93df46ee67dc [ 1190.689369] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Acquiring lock "refresh_cache-43e4ddf4-230e-49f7-975f-ba99a6da9398" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1190.689369] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Acquired lock "refresh_cache-43e4ddf4-230e-49f7-975f-ba99a6da9398" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1190.689369] env[62740]: DEBUG nova.network.neutron [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1190.689369] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Expecting reply to msg 3cdc55ab480f4f97bf28ad9822f545c9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1190.702397] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3cdc55ab480f4f97bf28ad9822f545c9 [ 1190.890142] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1190.890407] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running 
periodic task ComputeManager._instance_usage_audit {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1190.890582] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager.update_available_resource {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1190.890939] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 81c7992172cb4607ab944721130b4de2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1190.901562] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 81c7992172cb4607ab944721130b4de2 [ 1190.902692] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1190.903558] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1190.903801] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1190.903971] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62740) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1190.905089] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40609cef-8409-408e-a6ab-1dc3454cf530 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.915461] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51f709b5-2191-49b9-9dbb-a9767f1106f2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.929112] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-829e7fde-246b-4350-bf89-9e70995dfd77 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.936163] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d62061d0-46ad-4c43-a6df-7d442e298c9f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.966408] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181591MB free_disk=90GB free_vcpus=48 
pci_devices=None {{(pid=62740) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1190.966579] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1190.966778] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1190.967663] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 7eca018b17a74d46a7a568da7db5dbd5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1190.969215] env[62740]: DEBUG nova.network.neutron [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1191.012547] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7eca018b17a74d46a7a568da7db5dbd5 [ 1191.017645] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 9ee544e8c8574034a962e3a096c43faf in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1191.034143] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ee544e8c8574034a962e3a096c43faf [ 1191.060329] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 388a39df-9fa9-4153-9f3c-4ad94fd5edfb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1191.060879] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 5f57389d-853e-4439-872a-8345664578d0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1191.060879] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 472cd209-4192-4473-b788-d1ea342653bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1191.060879] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance d8dac9af-0897-4fbf-8ee6-1fb3955d48c0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1191.060879] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance d6c3ca16-5c7c-41e6-9850-10221603ad2a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1191.063805] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 158406db-7196-4826-aefa-20a58daa186b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1191.063805] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance e21a5624-20ca-45d8-a0bf-dd87cec1c701 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1191.063805] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance b0b16f66-8dbc-4e9b-a932-5de45215cfff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1191.063805] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 56106517-e735-4bf5-8d5a-dc0d4aab3991 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1191.064327] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 43e4ddf4-230e-49f7-975f-ba99a6da9398 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1191.064327] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 6c1807cc0398466c862c900692905327 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1191.079733] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c1807cc0398466c862c900692905327 [ 1191.080771] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance eba85edb-4d86-42c9-8b49-98f2173a3eeb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1191.081146] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 75f84501fe794c4db3646a725e790bae in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1191.095498] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 75f84501fe794c4db3646a725e790bae [ 1191.096957] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 7aacf4e0-b508-4a18-909a-3d1fe9458d98 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1191.096957] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg df79bb2b40264606970d4ce54de15322 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1191.109187] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg df79bb2b40264606970d4ce54de15322 [ 1191.109959] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 6005c9dc-3067-4719-a8f9-befb63f7cd8d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1191.110483] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 608dda319ce44ff2af3d2e1de30d605a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1191.122318] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 608dda319ce44ff2af3d2e1de30d605a [ 1191.123069] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance ba23ede2-be42-48ac-b281-571ccd158dee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1191.123735] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 27a9b4202925493b922b139625d11497 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1191.136896] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 27a9b4202925493b922b139625d11497 [ 1191.137765] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1191.138196] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 5e05272849d94fa7b8a4322591d9e76b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1191.152782] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e05272849d94fa7b8a4322591d9e76b [ 1191.153686] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 2deff09f-d24f-4609-91f2-1585e8407c2a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1191.153922] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1191.154086] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1191.240356] env[62740]: DEBUG nova.network.neutron [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Updating instance_info_cache with network_info: [{"id": "342c902c-1b43-4e8a-bfff-2345c99100ea", "address": "fa:16:3e:7f:b6:b2", "network": {"id": "3d8a9694-640a-4035-8c41-e6e4b337f2fc", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-2143312999-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bc583a8c2f9496a986a94b9eca2bc9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4225eb1f-0af4-4ed4-8e3d-de822eb6d4ea", "external-id": "nsx-vlan-transportzone-40", "segmentation_id": 40, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap342c902c-1b", "ovs_interfaceid": "342c902c-1b43-4e8a-bfff-2345c99100ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1191.240998] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Expecting reply to msg 81234dd63f29475f9cd6502ba1a34222 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1191.256655] 
env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 81234dd63f29475f9cd6502ba1a34222 [ 1191.257317] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Releasing lock "refresh_cache-43e4ddf4-230e-49f7-975f-ba99a6da9398" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1191.257658] env[62740]: DEBUG nova.compute.manager [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Instance network_info: |[{"id": "342c902c-1b43-4e8a-bfff-2345c99100ea", "address": "fa:16:3e:7f:b6:b2", "network": {"id": "3d8a9694-640a-4035-8c41-e6e4b337f2fc", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-2143312999-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bc583a8c2f9496a986a94b9eca2bc9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4225eb1f-0af4-4ed4-8e3d-de822eb6d4ea", "external-id": "nsx-vlan-transportzone-40", "segmentation_id": 40, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap342c902c-1b", "ovs_interfaceid": "342c902c-1b43-4e8a-bfff-2345c99100ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1191.258172] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7f:b6:b2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4225eb1f-0af4-4ed4-8e3d-de822eb6d4ea', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '342c902c-1b43-4e8a-bfff-2345c99100ea', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1191.265692] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Creating folder: Project (6bc583a8c2f9496a986a94b9eca2bc9b). Parent ref: group-v156037. 
{{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1191.268800] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f7c1e85e-e5f9-4398-b12f-0d1e480aae13 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.333376] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Created folder: Project (6bc583a8c2f9496a986a94b9eca2bc9b) in parent group-v156037. [ 1191.333750] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Creating folder: Instances. Parent ref: group-v156136. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1191.333993] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8ff8b99f-851f-4f75-875b-dd198a50ba20 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.344252] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Created folder: Instances in parent group-v156136. [ 1191.345225] env[62740]: DEBUG oslo.service.loopingcall [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1191.345225] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1191.345225] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dcf4c6e7-a465-4ca1-9914-c100d7b66850 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.368682] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1191.368682] env[62740]: value = "task-640230" [ 1191.368682] env[62740]: _type = "Task" [ 1191.368682] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.379858] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640230, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.438486] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6c03c523-8e4c-4516-8e5c-a6507b2f73a1 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg d5167ec2fc704aa6aab650c30bb2cdd8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1191.442784] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b6c7a8a-037b-409c-9dd9-56ae6797d67d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.452368] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d5167ec2fc704aa6aab650c30bb2cdd8 [ 1191.454757] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53f5517a-0252-487f-abb6-3d92360ceee1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.458371] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6c03c523-8e4c-4516-8e5c-a6507b2f73a1 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Acquiring lock "b0b16f66-8dbc-4e9b-a932-5de45215cfff" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1191.484623] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c045566a-c5f5-4c4e-95db-c29337ed2260 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.492260] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef4f3f88-73d1-4302-9920-02946cbae3e8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.506592] env[62740]: DEBUG nova.compute.provider_tree [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1191.507120] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 67e10cfc4e394b5281648026503e04b2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1191.515247] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 67e10cfc4e394b5281648026503e04b2 [ 1191.516169] env[62740]: DEBUG nova.scheduler.client.report [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1191.518578] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting 
reply to msg c08bfa9caebe4632aead61e66765f46b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1191.537231] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c08bfa9caebe4632aead61e66765f46b [ 1191.537794] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62740) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1191.539015] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.571s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1191.879583] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640230, 'name': CreateVM_Task, 'duration_secs': 0.300932} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.879870] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1191.894054] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1191.894242] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1191.894555] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1191.894806] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6d70138-4a87-45fd-be03-bdf4f26bcaf8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.899836] env[62740]: DEBUG oslo_vmware.api [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Waiting for the task: (returnval){ [ 1191.899836] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]527a5f87-3a22-c023-9b99-92c0454d873c" [ 1191.899836] env[62740]: _type = "Task" [ 1191.899836] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.907617] env[62740]: DEBUG oslo_vmware.api [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]527a5f87-3a22-c023-9b99-92c0454d873c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.411040] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1192.411991] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1192.411991] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1192.539312] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1192.539536] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Starting heal instance info cache {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 1192.539674] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Rebuilding the list of instances to heal {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 1192.540278] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 840b110ea82a4e3fa2523547751a5047 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1192.565419] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 840b110ea82a4e3fa2523547751a5047 [ 1192.569290] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1192.569463] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 5f57389d-853e-4439-872a-8345664578d0] Skipping network cache update for instance because it is Building. 
{{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1192.569601] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1192.569727] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1192.569880] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1192.570038] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 158406db-7196-4826-aefa-20a58daa186b] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1192.570180] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1192.570309] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1192.570441] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1192.570560] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1192.570675] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Didn't find any instances for network info cache update. 
{{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 1192.589742] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Acquiring lock "3f36f081-2851-4339-860d-0a302ef4ee2c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1192.589989] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Lock "3f36f081-2851-4339-860d-0a302ef4ee2c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1192.753067] env[62740]: DEBUG nova.compute.manager [req-c8f02d91-06d2-40b7-bb8f-fef32cdc57da req-74a92e6c-fbe1-4405-b33b-c7716be43b84 service nova] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Received event network-changed-342c902c-1b43-4e8a-bfff-2345c99100ea {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1192.753203] env[62740]: DEBUG nova.compute.manager [req-c8f02d91-06d2-40b7-bb8f-fef32cdc57da req-74a92e6c-fbe1-4405-b33b-c7716be43b84 service nova] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Refreshing instance network info cache due to event network-changed-342c902c-1b43-4e8a-bfff-2345c99100ea. {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1192.753460] env[62740]: DEBUG oslo_concurrency.lockutils [req-c8f02d91-06d2-40b7-bb8f-fef32cdc57da req-74a92e6c-fbe1-4405-b33b-c7716be43b84 service nova] Acquiring lock "refresh_cache-43e4ddf4-230e-49f7-975f-ba99a6da9398" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1192.753563] env[62740]: DEBUG oslo_concurrency.lockutils [req-c8f02d91-06d2-40b7-bb8f-fef32cdc57da req-74a92e6c-fbe1-4405-b33b-c7716be43b84 service nova] Acquired lock "refresh_cache-43e4ddf4-230e-49f7-975f-ba99a6da9398" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1192.753725] env[62740]: DEBUG nova.network.neutron [req-c8f02d91-06d2-40b7-bb8f-fef32cdc57da req-74a92e6c-fbe1-4405-b33b-c7716be43b84 service nova] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Refreshing network info cache for port 342c902c-1b43-4e8a-bfff-2345c99100ea {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1192.754221] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-c8f02d91-06d2-40b7-bb8f-fef32cdc57da req-74a92e6c-fbe1-4405-b33b-c7716be43b84 service nova] Expecting reply to msg a859007c5fc9443888cc25b28b6567b9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1192.761743] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a859007c5fc9443888cc25b28b6567b9 [ 1193.149498] env[62740]: DEBUG nova.network.neutron [req-c8f02d91-06d2-40b7-bb8f-fef32cdc57da req-74a92e6c-fbe1-4405-b33b-c7716be43b84 service nova] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Updated VIF entry in instance network info cache for port 342c902c-1b43-4e8a-bfff-2345c99100ea. 
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1193.149880] env[62740]: DEBUG nova.network.neutron [req-c8f02d91-06d2-40b7-bb8f-fef32cdc57da req-74a92e6c-fbe1-4405-b33b-c7716be43b84 service nova] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Updating instance_info_cache with network_info: [{"id": "342c902c-1b43-4e8a-bfff-2345c99100ea", "address": "fa:16:3e:7f:b6:b2", "network": {"id": "3d8a9694-640a-4035-8c41-e6e4b337f2fc", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-2143312999-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bc583a8c2f9496a986a94b9eca2bc9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4225eb1f-0af4-4ed4-8e3d-de822eb6d4ea", "external-id": "nsx-vlan-transportzone-40", "segmentation_id": 40, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap342c902c-1b", "ovs_interfaceid": "342c902c-1b43-4e8a-bfff-2345c99100ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1193.150428] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-c8f02d91-06d2-40b7-bb8f-fef32cdc57da req-74a92e6c-fbe1-4405-b33b-c7716be43b84 service nova] Expecting reply to msg ec140df013674ac8b7e7c3bf7c5bdba7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1193.158997] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ec140df013674ac8b7e7c3bf7c5bdba7 [ 1193.159768] env[62740]: DEBUG oslo_concurrency.lockutils [req-c8f02d91-06d2-40b7-bb8f-fef32cdc57da req-74a92e6c-fbe1-4405-b33b-c7716be43b84 service nova] Releasing lock "refresh_cache-43e4ddf4-230e-49f7-975f-ba99a6da9398" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1193.890584] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1193.890860] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1193.891123] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62740) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 1197.366878] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-b8f389c4-01b7-4a36-b18b-6706bd672fd2 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 4defba86f8294bb9851d39e77a3f990f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1197.376679] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4defba86f8294bb9851d39e77a3f990f [ 1197.377233] env[62740]: DEBUG oslo_concurrency.lockutils [None req-b8f389c4-01b7-4a36-b18b-6706bd672fd2 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquiring lock "56106517-e735-4bf5-8d5a-dc0d4aab3991" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1217.288727] env[62740]: WARNING oslo_vmware.rw_handles [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1217.288727] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1217.288727] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1217.288727] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1217.288727] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1217.288727] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 1217.288727] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1217.288727] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1217.288727] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1217.288727] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1217.288727] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1217.288727] env[62740]: ERROR oslo_vmware.rw_handles [ 1217.289555] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/cf3ab7a6-586b-4224-bb19-1f5aef50ad4f/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1217.291583] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1217.291830] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 
tempest-ServerActionsTestJSON-530632132-project-member] Copying Virtual Disk [datastore2] vmware_temp/cf3ab7a6-586b-4224-bb19-1f5aef50ad4f/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore2] vmware_temp/cf3ab7a6-586b-4224-bb19-1f5aef50ad4f/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1217.292151] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7ad36aa6-25cc-42fa-acdc-0d87444811a9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.299601] env[62740]: DEBUG oslo_vmware.api [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Waiting for the task: (returnval){ [ 1217.299601] env[62740]: value = "task-640231" [ 1217.299601] env[62740]: _type = "Task" [ 1217.299601] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.308042] env[62740]: DEBUG oslo_vmware.api [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Task: {'id': task-640231, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.810177] env[62740]: DEBUG oslo_vmware.exceptions [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Fault InvalidArgument not matched. {{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1217.810534] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1217.811153] env[62740]: ERROR nova.compute.manager [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1217.811153] env[62740]: Faults: ['InvalidArgument'] [ 1217.811153] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Traceback (most recent call last): [ 1217.811153] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1217.811153] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] yield resources [ 1217.811153] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1217.811153] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] self.driver.spawn(context, instance, image_meta, [ 1217.811153] env[62740]: ERROR 
nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1217.811153] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1217.811153] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1217.811153] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] self._fetch_image_if_missing(context, vi) [ 1217.811153] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1217.811575] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] image_cache(vi, tmp_image_ds_loc) [ 1217.811575] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1217.811575] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] vm_util.copy_virtual_disk( [ 1217.811575] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1217.811575] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] session._wait_for_task(vmdk_copy_task) [ 1217.811575] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1217.811575] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] return self.wait_for_task(task_ref) [ 1217.811575] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1217.811575] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] return evt.wait() [ 1217.811575] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1217.811575] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] result = hub.switch() [ 1217.811575] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1217.811575] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] return self.greenlet.switch() [ 1217.811939] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1217.811939] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] self.f(*self.args, **self.kw) [ 1217.811939] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1217.811939] env[62740]: ERROR nova.compute.manager [instance: 
e21a5624-20ca-45d8-a0bf-dd87cec1c701] raise exceptions.translate_fault(task_info.error) [ 1217.811939] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1217.811939] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Faults: ['InvalidArgument'] [ 1217.811939] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] [ 1217.811939] env[62740]: INFO nova.compute.manager [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Terminating instance [ 1217.813190] env[62740]: DEBUG oslo_concurrency.lockutils [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1217.813434] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1217.813627] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-47884a2c-bd05-4dcc-af69-83603d645e1b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.816108] env[62740]: DEBUG nova.compute.manager [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Start destroying the instance on the hypervisor. 
{{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1217.816327] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1217.817176] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faaa44f8-9dd5-495a-9810-d95b9619bd49 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.824654] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1217.825900] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c3339fde-571f-4ac7-9d93-92202ed134bb {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.827007] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1217.827190] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1217.827838] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-361910ee-3718-4d36-a9bf-c538c4dfe2ba {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.833142] env[62740]: DEBUG oslo_vmware.api [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Waiting for the task: (returnval){ [ 1217.833142] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52a8db8d-0673-9343-71d8-8cfbf5e27827" [ 1217.833142] env[62740]: _type = "Task" [ 1217.833142] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.840225] env[62740]: DEBUG oslo_vmware.api [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52a8db8d-0673-9343-71d8-8cfbf5e27827, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.905182] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1217.905505] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1217.905731] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Deleting the datastore file [datastore2] e21a5624-20ca-45d8-a0bf-dd87cec1c701 {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1217.906098] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-be0588e2-9dad-447f-aaab-8cb37e889c25 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.913027] env[62740]: DEBUG oslo_vmware.api [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Waiting for the task: (returnval){ [ 1217.913027] env[62740]: value = "task-640233" [ 1217.913027] env[62740]: _type = "Task" [ 1217.913027] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.920878] env[62740]: DEBUG oslo_vmware.api [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Task: {'id': task-640233, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.343844] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1218.344178] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Creating directory with path [datastore2] vmware_temp/7cc9fcd2-21a4-43f9-b555-6bd80fa2522a/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1218.344358] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fb520301-4d21-4735-abff-7f8e9d723916 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.355986] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Created directory with path [datastore2] vmware_temp/7cc9fcd2-21a4-43f9-b555-6bd80fa2522a/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1218.356182] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Fetch image to [datastore2] vmware_temp/7cc9fcd2-21a4-43f9-b555-6bd80fa2522a/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1218.356378] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/7cc9fcd2-21a4-43f9-b555-6bd80fa2522a/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1218.357181] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a8f0e32-4149-4dcb-a8fe-0467d37aab1a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.363775] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-843089bd-9433-4624-a195-16a6b9ddbcde {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.372621] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5589be7f-7508-452c-960f-63f860938589 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.403836] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ce7672a3-e083-490d-b22f-4f369d6df4ea {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.409503] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b6db78db-bcc1-4e0e-ba73-a1d0b3346a17 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.420541] env[62740]: DEBUG oslo_vmware.api [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Task: {'id': task-640233, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.094885} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.420794] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1218.420976] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1218.421169] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1218.421353] env[62740]: INFO nova.compute.manager [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Took 0.61 seconds to destroy the instance on the hypervisor. 
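The task-640233/task-640233 wait-and-poll cycle recorded above (wait_for_task at oslo_vmware/api.py:397, _poll_task at :434 and :444) is oslo.vmware's standard way of driving an asynchronous vSphere task to completion. A minimal, self-contained sketch of that flow follows; the vCenter endpoint, credentials, retry/poll settings, and datastore path are placeholders, not values taken from this log, and the error branch marks where a task fault such as the earlier "A specified parameter was not correct: fileType" traceback would surface:

    from oslo_vmware import api
    from oslo_vmware import exceptions as vexc
    from oslo_vmware import vim_util

    # Placeholder vCenter endpoint and credentials (hypothetical).
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10,
                                   task_poll_interval=0.5)

    # Look up a Datacenter managed-object reference via a property query,
    # comparable to the PropertyCollector.RetrievePropertiesEx calls above.
    result = session.invoke_api(vim_util, 'get_objects', session.vim,
                                'Datacenter', 100)
    dc_ref = result.objects[0].obj

    # Start an asynchronous task; DeleteDatastoreFile_Task mirrors the
    # FileManager.DeleteDatastoreFile_Task invocation in the log. The
    # datastore path here is illustrative only.
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              session.vim.service_content.fileManager,
                              name='[datastore2] some-instance-dir',
                              datacenter=dc_ref)
    try:
        # wait_for_task() polls the task (the "progress is 0%" lines)
        # until it reaches 'success', then returns the task info.
        task_info = session.wait_for_task(task)
    except vexc.VimFaultException as e:
        # A failed task is translated into an exception at api.py:448
        # (exceptions.translate_fault), which is how the InvalidArgument
        # fault above reached nova.compute.manager.
        print(e.fault_list, e.msg)

One nuance the tracebacks above make visible: wait_for_task() itself runs a looping call (oslo_vmware/common/loopingcall.py), so the caller blocks on an event while the polling happens at task_poll_interval.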
[ 1218.423468] env[62740]: DEBUG nova.compute.claims [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1218.423644] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1218.423860] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1218.425892] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Expecting reply to msg 8eebd14d6c5442b9b04a7a47eceded30 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1218.432661] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1218.466129] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8eebd14d6c5442b9b04a7a47eceded30 [ 1218.484375] env[62740]: DEBUG oslo_vmware.rw_handles [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7cc9fcd2-21a4-43f9-b555-6bd80fa2522a/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1218.552446] env[62740]: DEBUG oslo_vmware.rw_handles [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Completed reading data from the image iterator. {{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1218.552668] env[62740]: DEBUG oslo_vmware.rw_handles [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7cc9fcd2-21a4-43f9-b555-6bd80fa2522a/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1218.743096] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49128092-5a6f-4682-9cb6-559d54867312 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.751374] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a59ec396-bde6-42b7-a89e-182af295f46a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.782755] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c79935e-d02b-489f-88b7-b7c69c6778bd {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.790347] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3e90a17-f543-415b-aa2e-7fcdf92f3d89 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.803343] env[62740]: DEBUG nova.compute.provider_tree [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1218.803857] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Expecting reply to msg ec16345a99fb4424906e862a2e123ed3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1218.813110] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ec16345a99fb4424906e862a2e123ed3 [ 1218.813842] env[62740]: DEBUG nova.scheduler.client.report [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1218.816161] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Expecting reply to msg e8536efe8bf34778bb6b61b61c0415e4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1218.829053] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e8536efe8bf34778bb6b61b61c0415e4 [ 1218.829157] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.405s 
{{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1218.829700] env[62740]: ERROR nova.compute.manager [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1218.829700] env[62740]: Faults: ['InvalidArgument'] [ 1218.829700] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Traceback (most recent call last): [ 1218.829700] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1218.829700] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] self.driver.spawn(context, instance, image_meta, [ 1218.829700] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1218.829700] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1218.829700] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1218.829700] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] self._fetch_image_if_missing(context, vi) [ 1218.829700] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1218.829700] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] image_cache(vi, tmp_image_ds_loc) [ 1218.829700] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1218.830192] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] vm_util.copy_virtual_disk( [ 1218.830192] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1218.830192] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] session._wait_for_task(vmdk_copy_task) [ 1218.830192] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1218.830192] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] return self.wait_for_task(task_ref) [ 1218.830192] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1218.830192] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] return evt.wait() [ 1218.830192] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1218.830192] env[62740]: ERROR 
nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] result = hub.switch() [ 1218.830192] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1218.830192] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] return self.greenlet.switch() [ 1218.830192] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1218.830192] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] self.f(*self.args, **self.kw) [ 1218.830687] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1218.830687] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] raise exceptions.translate_fault(task_info.error) [ 1218.830687] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1218.830687] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Faults: ['InvalidArgument'] [ 1218.830687] env[62740]: ERROR nova.compute.manager [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] [ 1218.830687] env[62740]: DEBUG nova.compute.utils [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1218.831934] env[62740]: DEBUG nova.compute.manager [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Build of instance e21a5624-20ca-45d8-a0bf-dd87cec1c701 was re-scheduled: A specified parameter was not correct: fileType [ 1218.831934] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1218.832348] env[62740]: DEBUG nova.compute.manager [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1218.832524] env[62740]: DEBUG nova.compute.manager [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1218.832698] env[62740]: DEBUG nova.compute.manager [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1218.832871] env[62740]: DEBUG nova.network.neutron [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1219.082670] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Acquiring lock "913ddb91-9d46-459e-8775-c9f380ed3cc4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1219.082886] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Lock "913ddb91-9d46-459e-8775-c9f380ed3cc4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1219.220292] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Expecting reply to msg e4f67b79aae14c40b1e40431f159f4ad in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1219.231029] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e4f67b79aae14c40b1e40431f159f4ad [ 1219.231517] env[62740]: DEBUG nova.network.neutron [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1219.231999] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Expecting reply to msg 9d9aed040db64221afa8002a2a89b23e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1219.243582] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d9aed040db64221afa8002a2a89b23e [ 1219.244571] env[62740]: INFO nova.compute.manager [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Took 0.41 seconds to deallocate network for instance. 
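The traceback above ends in oslo_vmware.api._poll_task raising exceptions.translate_fault(task_info.error). A minimal pure-Python sketch of that polling loop follows; TaskInfo, translate_fault and the InvalidArgument handling here are illustrative stand-ins for the real oslo.vmware code, not copies of it:

import time
from dataclasses import dataclass


class VimFaultException(Exception):
    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list


@dataclass
class TaskInfo:
    state: str          # 'running', 'success' or 'error'
    error_msg: str = ''
    fault: str = ''


def translate_fault(info):
    # oslo.vmware maps known vSphere faults to specific exception classes;
    # unmatched faults (like InvalidArgument in this log) fall back to a
    # generic VimFaultException carrying the fault names.
    return VimFaultException([info.fault], info.error_msg)


def poll_task(fetch_task_info, interval=0.5):
    """Poll a vCenter task until it finishes, raising on server-side error."""
    while True:
        info = fetch_task_info()
        if info.state == 'success':
            return info
        if info.state == 'error':
            raise translate_fault(info)
        time.sleep(interval)


# The CopyVirtualDisk_Task in this log fails on the vCenter side, so the
# poll surfaces the fault to the caller, which is how it reaches spawn():
failed = TaskInfo(state='error',
                  error_msg='A specified parameter was not correct: fileType',
                  fault='InvalidArgument')
try:
    poll_task(lambda: failed)
except VimFaultException as exc:
    print(exc, exc.fault_list)   # -> ... ['InvalidArgument']

Because vCenter rejects the copy server-side, the fault reproduces on every attempt, which is why the build of e21a5624-20ca-45d8-a0bf-dd87cec1c701 is re-scheduled above rather than retried in place.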
[ 1219.246360] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Expecting reply to msg c0631aa29c614ccb80a60a8162dc79b6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1219.291192] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c0631aa29c614ccb80a60a8162dc79b6 [ 1219.295908] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Expecting reply to msg e4c1f5a1a7134e338576f177bb2f5911 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1219.326660] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e4c1f5a1a7134e338576f177bb2f5911 [ 1219.355548] env[62740]: INFO nova.scheduler.client.report [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Deleted allocations for instance e21a5624-20ca-45d8-a0bf-dd87cec1c701 [ 1219.360800] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Expecting reply to msg a9661f346ff742ec88c36e7c286a2e34 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1219.381496] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a9661f346ff742ec88c36e7c286a2e34 [ 1219.381804] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1a67626d-d93d-4307-a883-dc2f893b445d tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Lock "e21a5624-20ca-45d8-a0bf-dd87cec1c701" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 264.839s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1219.382435] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Expecting reply to msg 52b7a41ed3d0496fae8253096dc1e121 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1219.383413] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8b97929b-3997-4b96-9edb-fffdb2f10b2a tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Lock "e21a5624-20ca-45d8-a0bf-dd87cec1c701" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 68.362s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1219.383413] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8b97929b-3997-4b96-9edb-fffdb2f10b2a tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Acquiring lock "e21a5624-20ca-45d8-a0bf-dd87cec1c701-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1219.383757] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8b97929b-3997-4b96-9edb-fffdb2f10b2a tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Lock 
"e21a5624-20ca-45d8-a0bf-dd87cec1c701-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1219.383869] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8b97929b-3997-4b96-9edb-fffdb2f10b2a tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Lock "e21a5624-20ca-45d8-a0bf-dd87cec1c701-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1219.385849] env[62740]: INFO nova.compute.manager [None req-8b97929b-3997-4b96-9edb-fffdb2f10b2a tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Terminating instance [ 1219.391017] env[62740]: DEBUG nova.compute.manager [None req-8b97929b-3997-4b96-9edb-fffdb2f10b2a tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1219.391017] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-8b97929b-3997-4b96-9edb-fffdb2f10b2a tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1219.391017] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-92ce722d-e57b-4f27-a1bf-961a211a4afe {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.394622] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 52b7a41ed3d0496fae8253096dc1e121 [ 1219.395008] env[62740]: DEBUG nova.compute.manager [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Starting instance... 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1219.396637] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Expecting reply to msg 4f3527e01d2a4d42bda1a5e4afda86a4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1219.401540] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-972fcddb-2c31-4101-b78e-aaee8c47dcf0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.430475] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4f3527e01d2a4d42bda1a5e4afda86a4 [ 1219.430897] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-8b97929b-3997-4b96-9edb-fffdb2f10b2a tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e21a5624-20ca-45d8-a0bf-dd87cec1c701 could not be found. [ 1219.431097] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-8b97929b-3997-4b96-9edb-fffdb2f10b2a tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1219.431327] env[62740]: INFO nova.compute.manager [None req-8b97929b-3997-4b96-9edb-fffdb2f10b2a tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1219.431519] env[62740]: DEBUG oslo.service.loopingcall [None req-8b97929b-3997-4b96-9edb-fffdb2f10b2a tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1219.433863] env[62740]: DEBUG nova.compute.manager [-] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1219.433968] env[62740]: DEBUG nova.network.neutron [-] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1219.454105] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1219.454105] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1219.454105] env[62740]: INFO nova.compute.claims [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1219.455862] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Expecting reply to msg 468b936ef47e4b14829db03a291e4678 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1219.462251] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ceb1ecafe845432fa4d057cbee95547d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1219.470110] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ceb1ecafe845432fa4d057cbee95547d [ 1219.470564] env[62740]: DEBUG nova.network.neutron [-] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1219.471077] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 19a72c7880f0489089637ca51a5585af in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1219.502850] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 19a72c7880f0489089637ca51a5585af [ 1219.503198] env[62740]: INFO nova.compute.manager [-] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] Took 0.07 seconds to deallocate network for instance. 
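The terminate and resource-claim records above all pass through the same lock wrapper that emits the 'Acquiring lock ... / acquired ... waited Ns / "released" ... held Ns' lines. A rough in-process stand-in for oslo_concurrency.lockutils.synchronized, assuming plain threading locks (the real helper also supports external file-based locks):

import functools
import threading
import time

_locks = {}
_registry_guard = threading.Lock()


def _get_lock(name):
    # One shared lock object per name, like lockutils' internal registry.
    with _registry_guard:
        return _locks.setdefault(name, threading.Lock())


def synchronized(name):
    def wrapper(fn):
        @functools.wraps(fn)
        def inner(*args, **kwargs):
            lock = _get_lock(name)
            target = f'{fn.__module__}.{fn.__qualname__}'
            print(f'Acquiring lock "{name}" by "{target}"')
            t0 = time.monotonic()
            lock.acquire()
            print(f'Lock "{name}" acquired by "{target}" '
                  f':: waited {time.monotonic() - t0:.3f}s')
            t1 = time.monotonic()
            try:
                return fn(*args, **kwargs)
            finally:
                lock.release()
                print(f'Lock "{name}" "released" by "{target}" '
                      f':: held {time.monotonic() - t1:.3f}s')
        return inner
    return wrapper


@synchronized('compute_resources')
def instance_claim():
    time.sleep(0.1)   # stand-in for resource-tracker bookkeeping


instance_claim()

The waited/held durations printed by this sketch correspond directly to the values logged above (e.g. "compute_resources" held 0.349s during instance_claim).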
[ 1219.506943] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8b97929b-3997-4b96-9edb-fffdb2f10b2a tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Expecting reply to msg 511c990bd94e4ab790bf64445e828c5c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1219.511028] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 468b936ef47e4b14829db03a291e4678 [ 1219.512572] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Expecting reply to msg 252e0dd579f64bb5a3dcf5ddf02e3663 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1219.519806] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 252e0dd579f64bb5a3dcf5ddf02e3663 [ 1219.537240] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 511c990bd94e4ab790bf64445e828c5c [ 1219.554562] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8b97929b-3997-4b96-9edb-fffdb2f10b2a tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Expecting reply to msg d713641d52354b11885c64cfb90998dd in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1219.593666] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d713641d52354b11885c64cfb90998dd [ 1219.596476] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8b97929b-3997-4b96-9edb-fffdb2f10b2a tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Lock "e21a5624-20ca-45d8-a0bf-dd87cec1c701" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.213s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1219.596762] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8b97929b-3997-4b96-9edb-fffdb2f10b2a tempest-ServerActionsTestJSON-530632132 tempest-ServerActionsTestJSON-530632132-project-member] Expecting reply to msg 3fb628e7dc83477483e78ce379263f35 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1219.597431] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "e21a5624-20ca-45d8-a0bf-dd87cec1c701" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 37.338s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1219.597623] env[62740]: INFO nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: e21a5624-20ca-45d8-a0bf-dd87cec1c701] During sync_power_state the instance has a pending task (deleting). Skip. 
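The 'pending task (deleting). Skip.' record above comes from the periodic power-state sync declining to touch an instance that is mid-task, since the in-flight task will change the power state anyway. A hedged sketch of that guard; Instance and its fields are illustrative stand-ins for the Nova objects, not the real classes:

from dataclasses import dataclass
from typing import Optional


@dataclass
class Instance:
    uuid: str
    task_state: Optional[str]   # e.g. 'deleting', or None when idle
    power_state: int            # power state recorded in the database


def query_driver_power_state_and_sync(instance, driver_power_state):
    if instance.task_state is not None:
        # Matches the INFO line in the log: a pending task wins, skip.
        print(f'[instance: {instance.uuid}] During sync_power_state the '
              f'instance has a pending task ({instance.task_state}). Skip.')
        return
    if instance.power_state != driver_power_state:
        # Real Nova would reconcile the database state with the driver here.
        print(f'[instance: {instance.uuid}] power state out of sync, fixing')


query_driver_power_state_and_sync(
    Instance(uuid='e21a5624-20ca-45d8-a0bf-dd87cec1c701',
             task_state='deleting', power_state=1),
    driver_power_state=0)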
[ 1219.597797] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "e21a5624-20ca-45d8-a0bf-dd87cec1c701" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1219.606829] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3fb628e7dc83477483e78ce379263f35 [ 1219.719714] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f61ac21e-59b2-40f4-af18-9744efae0e4b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.728226] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38033b3c-097a-4853-85b5-a2c729763643 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.757416] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cfb87f3-89de-4a89-af64-4c52032a6557 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.764315] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f0c5392-59bc-45e7-b6ec-cd17fee1b9c7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.777539] env[62740]: DEBUG nova.compute.provider_tree [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1219.778045] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Expecting reply to msg 7dd42f5220bd4dc4b3337f41d0291df6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1219.787178] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7dd42f5220bd4dc4b3337f41d0291df6 [ 1219.788086] env[62740]: DEBUG nova.scheduler.client.report [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1219.790381] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Expecting reply to msg e4ffcecc526043e180a2f1a7c26c8b10 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1219.800756] 
env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e4ffcecc526043e180a2f1a7c26c8b10 [ 1219.801437] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.349s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1219.801886] env[62740]: DEBUG nova.compute.manager [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Start building networks asynchronously for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1219.803445] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Expecting reply to msg 375f7f15d7a848a89d63f99c8beeb35b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1219.831468] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 375f7f15d7a848a89d63f99c8beeb35b [ 1219.833085] env[62740]: DEBUG nova.compute.utils [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1219.833693] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Expecting reply to msg da31635bc3734b5495a83e91bf6d899c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1219.834699] env[62740]: DEBUG nova.compute.manager [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1219.834897] env[62740]: DEBUG nova.network.neutron [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1219.842257] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg da31635bc3734b5495a83e91bf6d899c [ 1219.842757] env[62740]: DEBUG nova.compute.manager [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1219.844543] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Expecting reply to msg c3f363a80ebd4ec79ebcab12b51ec30e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1219.871693] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c3f363a80ebd4ec79ebcab12b51ec30e [ 1219.874239] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Expecting reply to msg c9c4d7d179f843888054b92f4455d8e4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1219.890610] env[62740]: DEBUG nova.policy [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c0f6b27aa807467e8994e0a1d981b8ea', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0c7203aa243a4070bbf167c76e9ede0c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 1219.903257] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c9c4d7d179f843888054b92f4455d8e4 [ 1219.904273] env[62740]: DEBUG nova.compute.manager [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Start spawning the instance on the hypervisor. 
{{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1219.928871] env[62740]: DEBUG nova.virt.hardware [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1219.928871] env[62740]: DEBUG nova.virt.hardware [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1219.928871] env[62740]: DEBUG nova.virt.hardware [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1219.929098] env[62740]: DEBUG nova.virt.hardware [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1219.929219] env[62740]: DEBUG nova.virt.hardware [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1219.929486] env[62740]: DEBUG nova.virt.hardware [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1219.929613] env[62740]: DEBUG nova.virt.hardware [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1219.929794] env[62740]: DEBUG nova.virt.hardware [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1219.930073] env[62740]: DEBUG 
nova.virt.hardware [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1219.930297] env[62740]: DEBUG nova.virt.hardware [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1219.930554] env[62740]: DEBUG nova.virt.hardware [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1219.931517] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e6452c0-60c3-4458-b815-278ce335d94a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.939350] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fdb1086-49e3-434f-86ed-e018b88f6a47 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.327838] env[62740]: DEBUG nova.network.neutron [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Successfully created port: 48529d4f-76ea-4009-8dfb-1dafebe478f1 {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1221.393544] env[62740]: DEBUG nova.network.neutron [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Successfully updated port: 48529d4f-76ea-4009-8dfb-1dafebe478f1 {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1221.394235] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Expecting reply to msg f18b12ffb64645bc9e51b2a0e486a541 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1221.407588] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f18b12ffb64645bc9e51b2a0e486a541 [ 1221.408354] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Acquiring lock "refresh_cache-eba85edb-4d86-42c9-8b49-98f2173a3eeb" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1221.408557] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Acquired lock "refresh_cache-eba85edb-4d86-42c9-8b49-98f2173a3eeb" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1221.408730] 
env[62740]: DEBUG nova.network.neutron [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1221.409142] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Expecting reply to msg 83beec237b044d38b847b556065399bb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1221.417534] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 83beec237b044d38b847b556065399bb [ 1221.486165] env[62740]: DEBUG nova.network.neutron [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1221.652394] env[62740]: DEBUG nova.compute.manager [req-281aa59d-40b6-4ea7-abbd-1b0898f4a42d req-df742564-e02f-444d-9d01-3a4fd25fc9a6 service nova] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Received event network-vif-plugged-48529d4f-76ea-4009-8dfb-1dafebe478f1 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1221.652663] env[62740]: DEBUG oslo_concurrency.lockutils [req-281aa59d-40b6-4ea7-abbd-1b0898f4a42d req-df742564-e02f-444d-9d01-3a4fd25fc9a6 service nova] Acquiring lock "eba85edb-4d86-42c9-8b49-98f2173a3eeb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1221.652875] env[62740]: DEBUG oslo_concurrency.lockutils [req-281aa59d-40b6-4ea7-abbd-1b0898f4a42d req-df742564-e02f-444d-9d01-3a4fd25fc9a6 service nova] Lock "eba85edb-4d86-42c9-8b49-98f2173a3eeb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1221.653057] env[62740]: DEBUG oslo_concurrency.lockutils [req-281aa59d-40b6-4ea7-abbd-1b0898f4a42d req-df742564-e02f-444d-9d01-3a4fd25fc9a6 service nova] Lock "eba85edb-4d86-42c9-8b49-98f2173a3eeb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1221.653231] env[62740]: DEBUG nova.compute.manager [req-281aa59d-40b6-4ea7-abbd-1b0898f4a42d req-df742564-e02f-444d-9d01-3a4fd25fc9a6 service nova] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] No waiting events found dispatching network-vif-plugged-48529d4f-76ea-4009-8dfb-1dafebe478f1 {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1221.653398] env[62740]: WARNING nova.compute.manager [req-281aa59d-40b6-4ea7-abbd-1b0898f4a42d req-df742564-e02f-444d-9d01-3a4fd25fc9a6 service nova] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Received unexpected event network-vif-plugged-48529d4f-76ea-4009-8dfb-1dafebe478f1 for instance with vm_state building and task_state spawning. 
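The 'No waiting events found ... Received unexpected event ...' warning above is the external-event dispatch path: spawning code can register a waiter per (instance, event) pair, and a Neutron event arriving with no registered waiter is logged as unexpected and dropped. A simplified stand-in for nova.compute.manager.InstanceEvents; the method names prepare/pop_event are illustrative, not the real API:

import threading
from collections import defaultdict


class InstanceEvents:
    def __init__(self):
        self._events = defaultdict(dict)   # instance_uuid -> {name: Event}
        self._lock = threading.Lock()

    def prepare(self, instance_uuid, name):
        """Register interest in an event before triggering the operation."""
        event = threading.Event()
        with self._lock:
            self._events[instance_uuid][name] = event
        return event

    def pop_event(self, instance_uuid, name):
        with self._lock:
            return self._events[instance_uuid].pop(name, None)


events = InstanceEvents()


def external_instance_event(instance_uuid, name):
    event = events.pop_event(instance_uuid, name)
    if event is None:
        # Matches the WARNING in the log: nothing was waiting for this event.
        print(f'Received unexpected event {name} for instance {instance_uuid}')
    else:
        event.set()


# Unexpected path, as in the log (vif plugged while still spawning):
external_instance_event('eba85edb-4d86-42c9-8b49-98f2173a3eeb',
                        'network-vif-plugged-48529d4f-76ea-4009-8dfb-1dafebe478f1')
# Expected path: a registered waiter is woken instead of warned about.
waiter = events.prepare('uuid-x', 'network-vif-plugged-port1')
external_instance_event('uuid-x', 'network-vif-plugged-port1')
assert waiter.is_set()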
[ 1221.727214] env[62740]: DEBUG nova.network.neutron [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Updating instance_info_cache with network_info: [{"id": "48529d4f-76ea-4009-8dfb-1dafebe478f1", "address": "fa:16:3e:19:d3:ff", "network": {"id": "2b68edbf-7219-410f-9351-aeb80e4120b1", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-613150733-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0c7203aa243a4070bbf167c76e9ede0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3", "external-id": "nsx-vlan-transportzone-263", "segmentation_id": 263, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap48529d4f-76", "ovs_interfaceid": "48529d4f-76ea-4009-8dfb-1dafebe478f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1221.727730] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Expecting reply to msg 578eb46976794a7b96b7d3208322cc20 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1221.742236] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 578eb46976794a7b96b7d3208322cc20 [ 1221.742870] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Releasing lock "refresh_cache-eba85edb-4d86-42c9-8b49-98f2173a3eeb" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1221.743178] env[62740]: DEBUG nova.compute.manager [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Instance network_info: |[{"id": "48529d4f-76ea-4009-8dfb-1dafebe478f1", "address": "fa:16:3e:19:d3:ff", "network": {"id": "2b68edbf-7219-410f-9351-aeb80e4120b1", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-613150733-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0c7203aa243a4070bbf167c76e9ede0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3", "external-id": 
"nsx-vlan-transportzone-263", "segmentation_id": 263, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap48529d4f-76", "ovs_interfaceid": "48529d4f-76ea-4009-8dfb-1dafebe478f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1221.743591] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:19:d3:ff', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '48529d4f-76ea-4009-8dfb-1dafebe478f1', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1221.751698] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Creating folder: Project (0c7203aa243a4070bbf167c76e9ede0c). Parent ref: group-v156037. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1221.752259] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-862be1bd-23fa-40a7-a310-56769a8909db {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.763713] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Created folder: Project (0c7203aa243a4070bbf167c76e9ede0c) in parent group-v156037. [ 1221.763999] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Creating folder: Instances. Parent ref: group-v156139. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1221.764267] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6ba9c813-aae8-47a0-9963-188fbd58495b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.773904] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Created folder: Instances in parent group-v156139. [ 1221.774156] env[62740]: DEBUG oslo.service.loopingcall [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1221.774342] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1221.774539] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a8113ff3-5fea-4980-a29e-5836c0e15f00 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.795894] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1221.795894] env[62740]: value = "task-640236" [ 1221.795894] env[62740]: _type = "Task" [ 1221.795894] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.804057] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640236, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.306899] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640236, 'name': CreateVM_Task, 'duration_secs': 0.328128} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.307098] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1222.307760] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1222.307928] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1222.308271] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1222.308560] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60999829-3618-45bb-a010-b273f769980e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.314015] env[62740]: DEBUG oslo_vmware.api [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Waiting for the task: (returnval){ [ 1222.314015] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52f664e2-7ba6-068f-9300-96cf12a188e7" [ 1222.314015] env[62740]: _type = "Task" [ 1222.314015] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.320568] env[62740]: DEBUG oslo_vmware.api [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52f664e2-7ba6-068f-9300-96cf12a188e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.824405] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1222.825233] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1222.825772] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1223.684456] env[62740]: DEBUG nova.compute.manager [req-216a3579-63aa-4b67-9f0a-2c571fd6ef49 req-ff35efff-9f03-4151-9576-aeb3952d3253 service nova] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Received event network-changed-48529d4f-76ea-4009-8dfb-1dafebe478f1 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1223.684639] env[62740]: DEBUG nova.compute.manager [req-216a3579-63aa-4b67-9f0a-2c571fd6ef49 req-ff35efff-9f03-4151-9576-aeb3952d3253 service nova] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Refreshing instance network info cache due to event network-changed-48529d4f-76ea-4009-8dfb-1dafebe478f1. 
{{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1223.684800] env[62740]: DEBUG oslo_concurrency.lockutils [req-216a3579-63aa-4b67-9f0a-2c571fd6ef49 req-ff35efff-9f03-4151-9576-aeb3952d3253 service nova] Acquiring lock "refresh_cache-eba85edb-4d86-42c9-8b49-98f2173a3eeb" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1223.684988] env[62740]: DEBUG oslo_concurrency.lockutils [req-216a3579-63aa-4b67-9f0a-2c571fd6ef49 req-ff35efff-9f03-4151-9576-aeb3952d3253 service nova] Acquired lock "refresh_cache-eba85edb-4d86-42c9-8b49-98f2173a3eeb" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1223.685178] env[62740]: DEBUG nova.network.neutron [req-216a3579-63aa-4b67-9f0a-2c571fd6ef49 req-ff35efff-9f03-4151-9576-aeb3952d3253 service nova] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Refreshing network info cache for port 48529d4f-76ea-4009-8dfb-1dafebe478f1 {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1223.685728] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-216a3579-63aa-4b67-9f0a-2c571fd6ef49 req-ff35efff-9f03-4151-9576-aeb3952d3253 service nova] Expecting reply to msg f81205cbf1c74afa97b07b8d5bf2d084 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1223.692548] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f81205cbf1c74afa97b07b8d5bf2d084 [ 1224.234972] env[62740]: DEBUG nova.network.neutron [req-216a3579-63aa-4b67-9f0a-2c571fd6ef49 req-ff35efff-9f03-4151-9576-aeb3952d3253 service nova] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Updated VIF entry in instance network info cache for port 48529d4f-76ea-4009-8dfb-1dafebe478f1. 
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1224.235426] env[62740]: DEBUG nova.network.neutron [req-216a3579-63aa-4b67-9f0a-2c571fd6ef49 req-ff35efff-9f03-4151-9576-aeb3952d3253 service nova] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Updating instance_info_cache with network_info: [{"id": "48529d4f-76ea-4009-8dfb-1dafebe478f1", "address": "fa:16:3e:19:d3:ff", "network": {"id": "2b68edbf-7219-410f-9351-aeb80e4120b1", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-613150733-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0c7203aa243a4070bbf167c76e9ede0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3", "external-id": "nsx-vlan-transportzone-263", "segmentation_id": 263, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap48529d4f-76", "ovs_interfaceid": "48529d4f-76ea-4009-8dfb-1dafebe478f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1224.235970] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-216a3579-63aa-4b67-9f0a-2c571fd6ef49 req-ff35efff-9f03-4151-9576-aeb3952d3253 service nova] Expecting reply to msg 2f6968a3775c44ca9e9a2200a17678d6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1224.244683] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2f6968a3775c44ca9e9a2200a17678d6 [ 1224.245334] env[62740]: DEBUG oslo_concurrency.lockutils [req-216a3579-63aa-4b67-9f0a-2c571fd6ef49 req-ff35efff-9f03-4151-9576-aeb3952d3253 service nova] Releasing lock "refresh_cache-eba85edb-4d86-42c9-8b49-98f2173a3eeb" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1224.250937] env[62740]: DEBUG oslo_concurrency.lockutils [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Acquiring lock "07efd13e-40d0-4158-b17c-6f5c75474ce3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1224.251197] env[62740]: DEBUG oslo_concurrency.lockutils [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Lock "07efd13e-40d0-4158-b17c-6f5c75474ce3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1232.416735] env[62740]: DEBUG oslo_concurrency.lockutils [None req-033d8c43-bec8-4ca4-acdf-fc4f35477525 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquiring lock "8a156903-e4cf-43ed-9c6a-962a06ff9ef4" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1232.417115] env[62740]: DEBUG oslo_concurrency.lockutils [None req-033d8c43-bec8-4ca4-acdf-fc4f35477525 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "8a156903-e4cf-43ed-9c6a-962a06ff9ef4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1234.501115] env[62740]: WARNING oslo_vmware.rw_handles [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1234.501115] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1234.501115] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1234.501115] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1234.501115] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1234.501115] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 1234.501115] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1234.501115] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1234.501115] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1234.501115] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1234.501115] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1234.501115] env[62740]: ERROR oslo_vmware.rw_handles [ 1234.501115] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/4e7c5007-f922-4262-8fce-471eace49be3/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore1 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1234.502446] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1234.502687] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Copying Virtual Disk [datastore1] vmware_temp/4e7c5007-f922-4262-8fce-471eace49be3/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore1] 
vmware_temp/4e7c5007-f922-4262-8fce-471eace49be3/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1234.503035] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a5bef454-4d3f-4055-af3a-3eaae3c8816b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.511745] env[62740]: DEBUG oslo_vmware.api [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Waiting for the task: (returnval){ [ 1234.511745] env[62740]: value = "task-640237" [ 1234.511745] env[62740]: _type = "Task" [ 1234.511745] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.523485] env[62740]: DEBUG oslo_vmware.api [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Task: {'id': task-640237, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.023268] env[62740]: DEBUG oslo_vmware.exceptions [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Fault InvalidArgument not matched. {{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1235.023578] env[62740]: DEBUG oslo_concurrency.lockutils [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Releasing lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1235.024164] env[62740]: ERROR nova.compute.manager [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1235.024164] env[62740]: Faults: ['InvalidArgument'] [ 1235.024164] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Traceback (most recent call last): [ 1235.024164] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1235.024164] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] yield resources [ 1235.024164] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1235.024164] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] self.driver.spawn(context, instance, image_meta, [ 1235.024164] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1235.024164] 
env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1235.024164] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1235.024164] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] self._fetch_image_if_missing(context, vi) [ 1235.024164] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1235.024553] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] image_cache(vi, tmp_image_ds_loc) [ 1235.024553] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1235.024553] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] vm_util.copy_virtual_disk( [ 1235.024553] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1235.024553] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] session._wait_for_task(vmdk_copy_task) [ 1235.024553] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1235.024553] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] return self.wait_for_task(task_ref) [ 1235.024553] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1235.024553] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] return evt.wait() [ 1235.024553] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1235.024553] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] result = hub.switch() [ 1235.024553] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1235.024553] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] return self.greenlet.switch() [ 1235.025032] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1235.025032] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] self.f(*self.args, **self.kw) [ 1235.025032] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1235.025032] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] raise exceptions.translate_fault(task_info.error) [ 1235.025032] env[62740]: ERROR nova.compute.manager [instance: 
388a39df-9fa9-4153-9f3c-4ad94fd5edfb] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1235.025032] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Faults: ['InvalidArgument'] [ 1235.025032] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] [ 1235.025032] env[62740]: INFO nova.compute.manager [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Terminating instance [ 1235.026122] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Acquired lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1235.026355] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1235.029616] env[62740]: DEBUG nova.compute.manager [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1235.029885] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1235.030234] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ca88ce6d-7b46-45d2-b70b-0e0183569592 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.032784] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68f63399-54f1-4663-928e-af98efab676e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.043088] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1235.043343] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-47f1fa85-f85f-4864-9a78-4febfa584195 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.045717] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Created directory with path [datastore1] 
devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1235.045897] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1235.046898] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f0cb3a3-d4cf-4693-8307-b58797593317 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.051659] env[62740]: DEBUG oslo_vmware.api [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Waiting for the task: (returnval){ [ 1235.051659] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52d6e2a7-8987-8053-c26d-43e61274e1be" [ 1235.051659] env[62740]: _type = "Task" [ 1235.051659] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.060501] env[62740]: DEBUG oslo_vmware.api [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52d6e2a7-8987-8053-c26d-43e61274e1be, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.104076] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1235.104302] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Deleting contents of the VM from datastore datastore1 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1235.104487] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Deleting the datastore file [datastore1] 388a39df-9fa9-4153-9f3c-4ad94fd5edfb {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1235.104756] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d06b34b7-2c58-4176-8149-685e97be9ae9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.111411] env[62740]: DEBUG oslo_vmware.api [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Waiting for the task: (returnval){ [ 1235.111411] env[62740]: value = "task-640239" [ 1235.111411] env[62740]: _type = "Task" [ 1235.111411] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.119315] env[62740]: DEBUG oslo_vmware.api [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Task: {'id': task-640239, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.561429] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1235.561714] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Creating directory with path [datastore1] vmware_temp/05237dbb-4d83-4ff4-857d-9906a1c55357/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1235.561937] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bf024767-647d-426b-b6b3-8caf965b3ec8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.575266] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Created directory with path [datastore1] vmware_temp/05237dbb-4d83-4ff4-857d-9906a1c55357/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1235.575477] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Fetch image to [datastore1] vmware_temp/05237dbb-4d83-4ff4-857d-9906a1c55357/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1235.575647] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore1] vmware_temp/05237dbb-4d83-4ff4-857d-9906a1c55357/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore1 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1235.576465] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe20304c-e7d5-40c5-8d42-769c2b3c774f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.584352] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78078de5-8787-464f-b7d2-086666344c53 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.593937] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-1964093d-7cb5-4c3a-8346-44c2daef95d9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.629685] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28e3200a-4944-414d-bfce-4298a11b418a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.637652] env[62740]: DEBUG oslo_vmware.api [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Task: {'id': task-640239, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.065117} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.638848] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1235.639104] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Deleted contents of the VM from datastore datastore1 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1235.639326] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1235.639540] env[62740]: INFO nova.compute.manager [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Took 0.61 seconds to destroy the instance on the hypervisor. 
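The failure above follows a recognizable pattern: the earlier image upload died with RemoteDisconnected, the subsequent CopyVirtualDisk_Task then failed, and oslo.vmware's task poller translated the task error into VimFaultException ("A specified parameter was not correct: fileType", Faults: ['InvalidArgument']), which unwound the spawn. A minimal sketch of that polling pattern follows; it is an illustration only, not the real oslo.vmware implementation — fetch_task_info() and TaskFaultError are hypothetical stand-ins for the SOAP call and oslo_vmware.exceptions.VimFaultException.

import time


class TaskFaultError(Exception):
    """Hypothetical stand-in for oslo_vmware.exceptions.VimFaultException."""

    def __init__(self, msg, fault_list):
        super().__init__(msg)
        self.fault_list = fault_list  # e.g. ['InvalidArgument']


def wait_for_task(fetch_task_info, task_ref, poll_interval=0.5):
    """Poll a vSphere task until it leaves the running states.

    fetch_task_info(task_ref) is assumed to return an object with
    .state ('queued' | 'running' | 'success' | 'error'), plus .error
    carrying .msg and .faults when the task fails.
    """
    while True:
        info = fetch_task_info(task_ref)
        if info.state in ('queued', 'running'):
            # While looping here, the real poller emits the
            # "Task: {...} progress is 0%." DEBUG lines seen above.
            time.sleep(poll_interval)
            continue
        if info.state == 'success':
            return info
        # Task ended in error: translate the fault and raise, mirroring
        # _poll_task()'s raise exceptions.translate_fault(task_info.error)
        # in the traceback above.
        raise TaskFaultError(info.error.msg, info.error.faults)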
[ 1235.641736] env[62740]: DEBUG nova.compute.claims [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1235.641954] env[62740]: DEBUG oslo_concurrency.lockutils [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1235.643009] env[62740]: DEBUG oslo_concurrency.lockutils [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1235.647019] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg 62becd9775d14029825f350460aa9c20 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1235.647019] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-994d27e4-8e43-45cb-84cb-2835d6a9f36a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.674238] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore1 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1235.760329] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 62becd9775d14029825f350460aa9c20 [ 1235.815572] env[62740]: DEBUG oslo_vmware.rw_handles [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/05237dbb-4d83-4ff4-857d-9906a1c55357/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1235.883009] env[62740]: DEBUG oslo_vmware.rw_handles [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Completed reading data from the image iterator. 
{{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1235.883250] env[62740]: DEBUG oslo_vmware.rw_handles [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/05237dbb-4d83-4ff4-857d-9906a1c55357/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1236.100251] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-926c3d8c-799b-45e7-92a6-42b245dd5de9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.107898] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15f825ce-ce42-4b7b-9763-2320c2346d31 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.141461] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5e46795-5de3-4c64-80f5-02a5b2c3cc7b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.149248] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-200795ea-7ea5-4d07-9a63-8aeba72e62b7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.163107] env[62740]: DEBUG nova.compute.provider_tree [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1236.163615] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg 63616383c6b942acbf78a5538af46f15 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1236.172099] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63616383c6b942acbf78a5538af46f15 [ 1236.173171] env[62740]: DEBUG nova.scheduler.client.report [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1236.175331] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply 
to msg b32d8a15df2249abbf6e1e1420239f64 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1236.188037] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b32d8a15df2249abbf6e1e1420239f64 [ 1236.188967] env[62740]: DEBUG oslo_concurrency.lockutils [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.547s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1236.189523] env[62740]: ERROR nova.compute.manager [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1236.189523] env[62740]: Faults: ['InvalidArgument'] [ 1236.189523] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Traceback (most recent call last): [ 1236.189523] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1236.189523] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] self.driver.spawn(context, instance, image_meta, [ 1236.189523] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1236.189523] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1236.189523] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1236.189523] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] self._fetch_image_if_missing(context, vi) [ 1236.189523] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1236.189523] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] image_cache(vi, tmp_image_ds_loc) [ 1236.189523] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1236.190150] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] vm_util.copy_virtual_disk( [ 1236.190150] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1236.190150] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] session._wait_for_task(vmdk_copy_task) [ 1236.190150] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1236.190150] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] return self.wait_for_task(task_ref) [ 
1236.190150] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1236.190150] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] return evt.wait() [ 1236.190150] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1236.190150] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] result = hub.switch() [ 1236.190150] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1236.190150] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] return self.greenlet.switch() [ 1236.190150] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1236.190150] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] self.f(*self.args, **self.kw) [ 1236.190657] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1236.190657] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] raise exceptions.translate_fault(task_info.error) [ 1236.190657] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1236.190657] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Faults: ['InvalidArgument'] [ 1236.190657] env[62740]: ERROR nova.compute.manager [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] [ 1236.190657] env[62740]: DEBUG nova.compute.utils [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1236.191732] env[62740]: DEBUG nova.compute.manager [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Build of instance 388a39df-9fa9-4153-9f3c-4ad94fd5edfb was re-scheduled: A specified parameter was not correct: fileType [ 1236.191732] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1236.192128] env[62740]: DEBUG nova.compute.manager [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1236.192305] env[62740]: DEBUG nova.compute.manager [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 
tempest-DeleteServersAdminTestJSON-1607895434-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1236.192462] env[62740]: DEBUG nova.compute.manager [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1236.192623] env[62740]: DEBUG nova.network.neutron [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1236.921681] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg 31c56f8a202c4ba4b1882bf4cb3679dd in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1236.932597] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31c56f8a202c4ba4b1882bf4cb3679dd [ 1236.934448] env[62740]: DEBUG nova.network.neutron [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1236.934448] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg a7645c5d9a054adfb888fe28f812aefb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1236.943250] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a7645c5d9a054adfb888fe28f812aefb [ 1236.943967] env[62740]: INFO nova.compute.manager [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Took 0.75 seconds to deallocate network for instance. 
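The "Virt driver does not provide unplug_vifs method" line above marks a deliberate fallback during reschedule cleanup: the compute manager attempts to unplug VIFs, tolerates drivers (such as vmwareapi here) that do not implement the call, and proceeds to network deallocation either way. A hedged sketch of that decision, under the assumption that the driver signals non-support with NotImplementedError as the log suggests; the real logic lives in nova.compute.manager._cleanup_allocated_networks().

import logging

LOG = logging.getLogger(__name__)


def cleanup_allocated_networks(driver, instance, network_info):
    """Illustrative reschedule cleanup: try to unplug VIFs, then always
    deallocate the instance's network resources."""
    try:
        driver.unplug_vifs(instance, network_info)
    except NotImplementedError:
        # Produces the "Virt driver does not provide unplug_vifs method,
        # so it is not possible determine if VIFs should be unplugged."
        # DEBUG line seen in the log.
        LOG.debug('Virt driver does not provide unplug_vifs method, '
                  'skipping VIF unplug.')
    # Deallocation follows regardless ("Deallocating network for instance"
    # / deallocate_for_instance() in the entries above).
    deallocate_network(instance)


def deallocate_network(instance):
    LOG.debug('Deallocating network for instance %s', instance)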
[ 1236.946223] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg b495f29944674d2e9925c94eff8353a4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1236.997236] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b495f29944674d2e9925c94eff8353a4 [ 1237.001482] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg f23e2d848e5243a58dbba0d8035cd8f3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1237.035811] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f23e2d848e5243a58dbba0d8035cd8f3 [ 1237.068221] env[62740]: INFO nova.scheduler.client.report [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Deleted allocations for instance 388a39df-9fa9-4153-9f3c-4ad94fd5edfb [ 1237.082673] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg 1681ae46d841404996c7efe1383abd1c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1237.097150] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1681ae46d841404996c7efe1383abd1c [ 1237.100418] env[62740]: DEBUG oslo_concurrency.lockutils [None req-460e1ef7-c675-47ff-b4ea-1de3efe128bd tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Lock "388a39df-9fa9-4153-9f3c-4ad94fd5edfb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 411.430s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1237.100418] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Expecting reply to msg 4cdd8741efe6470c81ef552723c6b348 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1237.100418] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a8751302-ee3c-4f1d-b0dc-e986fd148f21 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Lock "388a39df-9fa9-4153-9f3c-4ad94fd5edfb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 214.945s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1237.100418] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a8751302-ee3c-4f1d-b0dc-e986fd148f21 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Acquiring lock "388a39df-9fa9-4153-9f3c-4ad94fd5edfb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1237.100742] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a8751302-ee3c-4f1d-b0dc-e986fd148f21 
tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Lock "388a39df-9fa9-4153-9f3c-4ad94fd5edfb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1237.100742] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a8751302-ee3c-4f1d-b0dc-e986fd148f21 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Lock "388a39df-9fa9-4153-9f3c-4ad94fd5edfb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1237.101774] env[62740]: INFO nova.compute.manager [None req-a8751302-ee3c-4f1d-b0dc-e986fd148f21 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Terminating instance [ 1237.104901] env[62740]: DEBUG nova.compute.manager [None req-a8751302-ee3c-4f1d-b0dc-e986fd148f21 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1237.104901] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-a8751302-ee3c-4f1d-b0dc-e986fd148f21 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1237.104901] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d0c47ff7-f67b-4002-97e7-3befee3525c4 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.114508] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1a49448-820e-438d-b88f-6ac194ca956c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.126142] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4cdd8741efe6470c81ef552723c6b348 [ 1237.126301] env[62740]: DEBUG nova.compute.manager [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Starting instance... 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1237.128401] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Expecting reply to msg cb3de3b303824594b77728660cc3cb79 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1237.147779] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-a8751302-ee3c-4f1d-b0dc-e986fd148f21 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 388a39df-9fa9-4153-9f3c-4ad94fd5edfb could not be found. [ 1237.147779] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-a8751302-ee3c-4f1d-b0dc-e986fd148f21 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1237.147779] env[62740]: INFO nova.compute.manager [None req-a8751302-ee3c-4f1d-b0dc-e986fd148f21 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1237.147779] env[62740]: DEBUG oslo.service.loopingcall [None req-a8751302-ee3c-4f1d-b0dc-e986fd148f21 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1237.147779] env[62740]: DEBUG nova.compute.manager [-] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1237.148111] env[62740]: DEBUG nova.network.neutron [-] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1237.176929] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 996bb1bb2e6c44a89dd984af5c0ae16f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1237.178709] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cb3de3b303824594b77728660cc3cb79 [ 1237.183594] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 996bb1bb2e6c44a89dd984af5c0ae16f [ 1237.184094] env[62740]: DEBUG nova.network.neutron [-] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1237.184835] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 568090e889164b89bacd527f324f8fa1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1237.191060] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 568090e889164b89bacd527f324f8fa1 [ 1237.191503] env[62740]: INFO nova.compute.manager [-] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] Took 0.04 seconds to deallocate network for instance. 
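The recurring "Acquiring lock ... / Lock ... acquired ... waited N s / Lock ... released ... held N s" triplets throughout this log (including the "compute_resources" claim that follows) are emitted by oslo.concurrency's lock wrapper when debug logging is enabled. A minimal usage sketch: the decorated body is hypothetical, but lockutils.synchronized is the real oslo.concurrency API, and it serializes concurrent claims against each other exactly as the timings above show.

from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def instance_claim(instance_uuid, vcpus, memory_mb):
    """Hypothetical stand-in for ResourceTracker.instance_claim(): the body
    runs with the 'compute_resources' lock held, so a second claim blocks
    (its 'waited' time grows) until the first releases the lock."""
    # ... test against inventory and reserve resources here ...
    return {'instance': instance_uuid,
            'vcpus': vcpus,
            'memory_mb': memory_mb}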
[ 1237.194929] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a8751302-ee3c-4f1d-b0dc-e986fd148f21 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg a3ab72babd534268ad4b8dd6cc426055 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1237.197037] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1237.197269] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1237.198842] env[62740]: INFO nova.compute.claims [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1237.200438] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Expecting reply to msg addcd8ed90de44e49e97037c5143ae07 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1237.228753] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a3ab72babd534268ad4b8dd6cc426055 [ 1237.234883] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg addcd8ed90de44e49e97037c5143ae07 [ 1237.236617] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Expecting reply to msg 38faceb440694fbb8bd7f1edf49853ab in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1237.244524] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 38faceb440694fbb8bd7f1edf49853ab [ 1237.251407] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a8751302-ee3c-4f1d-b0dc-e986fd148f21 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg c815c7f666114b24b6e19318203efa62 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1237.304607] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c815c7f666114b24b6e19318203efa62 [ 1237.311836] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a8751302-ee3c-4f1d-b0dc-e986fd148f21 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Lock "388a39df-9fa9-4153-9f3c-4ad94fd5edfb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.213s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1237.312324] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None 
req-a8751302-ee3c-4f1d-b0dc-e986fd148f21 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg c9857689680e451cb9d115e32e4cc10e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1237.313050] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "388a39df-9fa9-4153-9f3c-4ad94fd5edfb" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 55.055s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1237.313241] env[62740]: INFO nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 388a39df-9fa9-4153-9f3c-4ad94fd5edfb] During sync_power_state the instance has a pending task (deleting). Skip. [ 1237.313416] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "388a39df-9fa9-4153-9f3c-4ad94fd5edfb" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1237.329998] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c9857689680e451cb9d115e32e4cc10e [ 1237.514950] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2a60945-dedb-4654-8790-a0b9ddc5630f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.521479] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a682724-da04-4f4f-b089-066b1c95820f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.556503] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-402fb194-93cd-4685-b1d6-29ae090804b3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.564483] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2110f373-4d3c-458d-aff6-1fd877e12481 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.580205] env[62740]: DEBUG nova.compute.provider_tree [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1237.580907] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Expecting reply to msg 4b8f38f8d3974603a1b4c40d96c17551 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1237.589071] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4b8f38f8d3974603a1b4c40d96c17551 [ 1237.589932] env[62740]: DEBUG nova.scheduler.client.report [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Inventory has 
not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1237.592701] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Expecting reply to msg d44d8c979e364386a5a93d3cd82967da in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1237.607137] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d44d8c979e364386a5a93d3cd82967da [ 1237.608472] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.411s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1237.609117] env[62740]: DEBUG nova.compute.manager [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Start building networks asynchronously for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1237.610946] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Expecting reply to msg 47c27ca78944433e97b91a411bd6e581 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1237.646100] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 47c27ca78944433e97b91a411bd6e581 [ 1237.646100] env[62740]: DEBUG nova.compute.utils [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1237.647483] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Expecting reply to msg 44cadce8194f494abf72a58b8c9c54a8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1237.649398] env[62740]: DEBUG nova.compute.manager [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Allocating IP information in the background. 
{{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1237.652820] env[62740]: DEBUG nova.network.neutron [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1237.658991] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 44cadce8194f494abf72a58b8c9c54a8 [ 1237.659298] env[62740]: DEBUG nova.compute.manager [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Start building block device mappings for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1237.661088] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Expecting reply to msg cbb6f13a4b014328af58572852631802 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1237.694550] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cbb6f13a4b014328af58572852631802 [ 1237.697442] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Expecting reply to msg e3a6ef8e3551423d89c8d81a4c3403c8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1237.733241] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e3a6ef8e3551423d89c8d81a4c3403c8 [ 1237.734975] env[62740]: DEBUG nova.compute.manager [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Start spawning the instance on the hypervisor. 
{{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1237.761037] env[62740]: DEBUG nova.policy [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ae210d0a728b4176b3a86769052331bf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '638caa0c37e04221afe8f6f207d1d730', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 1237.764598] env[62740]: DEBUG nova.virt.hardware [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1237.764721] env[62740]: DEBUG nova.virt.hardware [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1237.764920] env[62740]: DEBUG nova.virt.hardware [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1237.765045] env[62740]: DEBUG nova.virt.hardware [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1237.765188] env[62740]: DEBUG nova.virt.hardware [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1237.765337] env[62740]: DEBUG nova.virt.hardware [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1237.765587] env[62740]: DEBUG nova.virt.hardware [None 
req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1237.765774] env[62740]: DEBUG nova.virt.hardware [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1237.765950] env[62740]: DEBUG nova.virt.hardware [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1237.766130] env[62740]: DEBUG nova.virt.hardware [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1237.766305] env[62740]: DEBUG nova.virt.hardware [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1237.770804] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-978fa9ed-ccf5-460b-a90e-e1ec984e16cd {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.776937] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b65da206-8bcb-47a6-9735-7b8fb6a67e94 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.943457] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-d3c3cf16-af6f-45bb-a72a-980969b694bd tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Expecting reply to msg c60641491b6f4c65a341ac547ba278a6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1237.956864] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c60641491b6f4c65a341ac547ba278a6 [ 1237.957363] env[62740]: DEBUG oslo_concurrency.lockutils [None req-d3c3cf16-af6f-45bb-a72a-980969b694bd tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Acquiring lock "43e4ddf4-230e-49f7-975f-ba99a6da9398" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1238.239919] env[62740]: DEBUG nova.network.neutron [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Successfully created port: 989d73a2-e885-488a-8ba5-f7d5d6067ce5 {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1239.079965] 
env[62740]: DEBUG nova.compute.manager [req-188f16db-7df5-41c6-83ba-5d51192f8265 req-e8d80a93-930a-4f60-99a0-d38d52af07d4 service nova] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Received event network-vif-plugged-989d73a2-e885-488a-8ba5-f7d5d6067ce5 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1239.080236] env[62740]: DEBUG oslo_concurrency.lockutils [req-188f16db-7df5-41c6-83ba-5d51192f8265 req-e8d80a93-930a-4f60-99a0-d38d52af07d4 service nova] Acquiring lock "7aacf4e0-b508-4a18-909a-3d1fe9458d98-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1239.080415] env[62740]: DEBUG oslo_concurrency.lockutils [req-188f16db-7df5-41c6-83ba-5d51192f8265 req-e8d80a93-930a-4f60-99a0-d38d52af07d4 service nova] Lock "7aacf4e0-b508-4a18-909a-3d1fe9458d98-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1239.080589] env[62740]: DEBUG oslo_concurrency.lockutils [req-188f16db-7df5-41c6-83ba-5d51192f8265 req-e8d80a93-930a-4f60-99a0-d38d52af07d4 service nova] Lock "7aacf4e0-b508-4a18-909a-3d1fe9458d98-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1239.080778] env[62740]: DEBUG nova.compute.manager [req-188f16db-7df5-41c6-83ba-5d51192f8265 req-e8d80a93-930a-4f60-99a0-d38d52af07d4 service nova] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] No waiting events found dispatching network-vif-plugged-989d73a2-e885-488a-8ba5-f7d5d6067ce5 {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1239.080906] env[62740]: WARNING nova.compute.manager [req-188f16db-7df5-41c6-83ba-5d51192f8265 req-e8d80a93-930a-4f60-99a0-d38d52af07d4 service nova] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Received unexpected event network-vif-plugged-989d73a2-e885-488a-8ba5-f7d5d6067ce5 for instance with vm_state building and task_state spawning. 
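The WARNING at the end of this burst is benign: Neutron delivered network-vif-plugged-989d73a2-e885-488a-8ba5-f7d5d6067ce5 before the driver had registered a waiter for it, so pop_instance_event found nothing to wake. A stdlib sketch of that latch pattern, loosely modeled on nova.compute.manager.InstanceEvents (all names illustrative):

```python
import threading

class InstanceEvents:
    """Per-instance latches keyed by event name; a rough analogue of
    the pop_instance_event machinery seen in the log."""

    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}  # (instance, event_name) -> threading.Event

    def prepare(self, instance, event_name):
        with self._lock:
            ev = threading.Event()
            self._waiters[(instance, event_name)] = ev
            return ev

    def pop(self, instance, event_name):
        with self._lock:
            return self._waiters.pop((instance, event_name), None)

events = InstanceEvents()
latch = events.pop("7aacf4e0", "network-vif-plugged-989d73a2")
if latch is None:
    print("WARNING: unexpected event, no waiter registered")  # as logged
else:
    latch.set()
```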
[ 1239.145781] env[62740]: DEBUG nova.network.neutron [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Successfully updated port: 989d73a2-e885-488a-8ba5-f7d5d6067ce5 {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1239.146532] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Expecting reply to msg 1c78c56e446e4b56bf1500dd6bccde9b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1239.160140] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1c78c56e446e4b56bf1500dd6bccde9b [ 1239.164951] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Acquiring lock "refresh_cache-7aacf4e0-b508-4a18-909a-3d1fe9458d98" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1239.165255] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Acquired lock "refresh_cache-7aacf4e0-b508-4a18-909a-3d1fe9458d98" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1239.165590] env[62740]: DEBUG nova.network.neutron [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1239.170023] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Expecting reply to msg 29db5f38767d473cab117030a9227fd3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1239.176894] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 29db5f38767d473cab117030a9227fd3 [ 1239.242457] env[62740]: DEBUG nova.network.neutron [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Instance cache missing network info. 
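Rebuilding the cache happens under a per-instance "refresh_cache-<uuid>" lock, which is why the acquire and release lines bracket the Neutron calls. A minimal sketch of such a keyed lock registry, assuming a threaded process rather than oslo.concurrency's implementation:

```python
import threading
from collections import defaultdict
from contextlib import contextmanager

# One lock per cache key, akin to the "refresh_cache-<instance-uuid>"
# locks in the log (illustrative, not oslo.concurrency's code).
_registry_guard = threading.Lock()
_locks = defaultdict(threading.Lock)

@contextmanager
def named_lock(name):
    with _registry_guard:
        lock = _locks[name]
    with lock:
        yield

with named_lock("refresh_cache-7aacf4e0-b508-4a18-909a-3d1fe9458d98"):
    pass  # rebuild the instance's network info cache here
```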
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1239.758700] env[62740]: DEBUG nova.network.neutron [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Updating instance_info_cache with network_info: [{"id": "989d73a2-e885-488a-8ba5-f7d5d6067ce5", "address": "fa:16:3e:be:c0:be", "network": {"id": "0e6f870d-2dd5-482c-9f98-afc887c88d62", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-437309259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "638caa0c37e04221afe8f6f207d1d730", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e028024-a9c1-4cae-8849-ea770a7ae0e4", "external-id": "nsx-vlan-transportzone-919", "segmentation_id": 919, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap989d73a2-e8", "ovs_interfaceid": "989d73a2-e885-488a-8ba5-f7d5d6067ce5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1239.759509] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Expecting reply to msg c8aa5ac530734bada005f46a6505930a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1239.772551] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c8aa5ac530734bada005f46a6505930a [ 1239.773357] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Releasing lock "refresh_cache-7aacf4e0-b508-4a18-909a-3d1fe9458d98" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1239.774257] env[62740]: DEBUG nova.compute.manager [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Instance network_info: |[{"id": "989d73a2-e885-488a-8ba5-f7d5d6067ce5", "address": "fa:16:3e:be:c0:be", "network": {"id": "0e6f870d-2dd5-482c-9f98-afc887c88d62", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-437309259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "638caa0c37e04221afe8f6f207d1d730", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"8e028024-a9c1-4cae-8849-ea770a7ae0e4", "external-id": "nsx-vlan-transportzone-919", "segmentation_id": 919, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap989d73a2-e8", "ovs_interfaceid": "989d73a2-e885-488a-8ba5-f7d5d6067ce5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1239.778019] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:be:c0:be', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8e028024-a9c1-4cae-8849-ea770a7ae0e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '989d73a2-e885-488a-8ba5-f7d5d6067ce5', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1239.785523] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Creating folder: Project (638caa0c37e04221afe8f6f207d1d730). Parent ref: group-v156037. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1239.786568] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-56e1275e-3435-4919-bde4-19dad2eebdb2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.798632] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Created folder: Project (638caa0c37e04221afe8f6f207d1d730) in parent group-v156037. [ 1239.799012] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Creating folder: Instances. Parent ref: group-v156142. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1239.799404] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f6ef718c-1530-4c36-89fa-4051e0ef12d9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.809119] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Created folder: Instances in parent group-v156142. [ 1239.809471] env[62740]: DEBUG oslo.service.loopingcall [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1239.809703] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1239.810077] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-67a129bc-f99e-4a7e-ba14-2a5d5cda7402 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.832156] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1239.832156] env[62740]: value = "task-640242" [ 1239.832156] env[62740]: _type = "Task" [ 1239.832156] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.840540] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640242, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.342931] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640242, 'name': CreateVM_Task, 'duration_secs': 0.324066} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.343377] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1240.344106] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1240.344325] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1240.344724] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1240.344987] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4daf9e23-2095-4577-9d6d-a4cdbedc38f3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.351213] env[62740]: DEBUG oslo_vmware.api [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Waiting for the task: (returnval){ [ 1240.351213] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52c59785-c9b3-e996-5000-82daedfa6947" [ 1240.351213] env[62740]: _type = "Task" [ 1240.351213] env[62740]: } to complete. 
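The CreateVM_Task lines show the usual oslo.vmware pattern: invoke the SOAP method, get a task handle back, then poll its progress until a terminal state ("progress is 0%." ... "completed successfully"). A hedged stdlib sketch of that polling loop, with a fake poll function standing in for TaskInfo retrieval:

```python
import time

def wait_for_task(poll, interval=0.5, timeout=60):
    """Poll a task until it reports success or error, mirroring the
    'progress is 0%' / 'completed successfully' lines in the log."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress = poll()
        print("progress is %d%%" % progress)
        if state == "success":
            return
        if state == "error":
            raise RuntimeError("task failed")
        time.sleep(interval)
    raise TimeoutError("task did not complete in %ss" % timeout)

ticks = iter([("running", 0), ("running", 50), ("success", 100)])
wait_for_task(lambda: next(ticks))
```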
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.359146] env[62740]: DEBUG oslo_vmware.api [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52c59785-c9b3-e996-5000-82daedfa6947, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.863729] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1240.864489] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1240.864489] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1241.119648] env[62740]: DEBUG nova.compute.manager [req-e6efe625-d120-4bd2-abf4-8d6cddc7bb2e req-b5bfd663-0125-4d06-b350-158fcc29aabc service nova] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Received event network-changed-989d73a2-e885-488a-8ba5-f7d5d6067ce5 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1241.119897] env[62740]: DEBUG nova.compute.manager [req-e6efe625-d120-4bd2-abf4-8d6cddc7bb2e req-b5bfd663-0125-4d06-b350-158fcc29aabc service nova] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Refreshing instance network info cache due to event network-changed-989d73a2-e885-488a-8ba5-f7d5d6067ce5. 
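"Processing image 174f7655-3fb8-458a-8e9c-108936afe738" plus the paired locks on devstack-image-cache_base are _fetch_image_if_missing guarding the shared datastore cache: the existence check and the download must happen under one named lock so concurrent builds do not fetch the same VMDK twice. A rough check-then-fetch sketch; the path and helpers here are hypothetical:

```python
import os
import threading

cache_lock = threading.Lock()  # stands in for the datastore-wide named lock

def fetch_image_if_missing(image_id, cache_dir="/tmp/image-cache"):
    path = os.path.join(cache_dir, image_id + ".vmdk")
    with cache_lock:
        if os.path.exists(path):        # another request already cached it
            return path
        os.makedirs(cache_dir, exist_ok=True)
        with open(path, "wb") as f:     # stand-in for the Glance download
            f.write(b"...")
        return path

print(fetch_image_if_missing("174f7655-3fb8-458a-8e9c-108936afe738"))
```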
{{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1241.120159] env[62740]: DEBUG oslo_concurrency.lockutils [req-e6efe625-d120-4bd2-abf4-8d6cddc7bb2e req-b5bfd663-0125-4d06-b350-158fcc29aabc service nova] Acquiring lock "refresh_cache-7aacf4e0-b508-4a18-909a-3d1fe9458d98" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1241.120344] env[62740]: DEBUG oslo_concurrency.lockutils [req-e6efe625-d120-4bd2-abf4-8d6cddc7bb2e req-b5bfd663-0125-4d06-b350-158fcc29aabc service nova] Acquired lock "refresh_cache-7aacf4e0-b508-4a18-909a-3d1fe9458d98" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1241.120547] env[62740]: DEBUG nova.network.neutron [req-e6efe625-d120-4bd2-abf4-8d6cddc7bb2e req-b5bfd663-0125-4d06-b350-158fcc29aabc service nova] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Refreshing network info cache for port 989d73a2-e885-488a-8ba5-f7d5d6067ce5 {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1241.121062] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-e6efe625-d120-4bd2-abf4-8d6cddc7bb2e req-b5bfd663-0125-4d06-b350-158fcc29aabc service nova] Expecting reply to msg 3d966bd6b96243f0ac527d19cad09829 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1241.135121] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3d966bd6b96243f0ac527d19cad09829 [ 1241.559197] env[62740]: DEBUG nova.network.neutron [req-e6efe625-d120-4bd2-abf4-8d6cddc7bb2e req-b5bfd663-0125-4d06-b350-158fcc29aabc service nova] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Updated VIF entry in instance network info cache for port 989d73a2-e885-488a-8ba5-f7d5d6067ce5. 
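The network-changed event only patches the one cached VIF whose port changed, which is what "Updated VIF entry in instance network info cache for port ..." records. A small sketch of that in-place update:

```python
def update_vif_entry(network_info, new_vif):
    """Replace the cached VIF with the same id, or append if unseen."""
    for i, vif in enumerate(network_info):
        if vif["id"] == new_vif["id"]:
            network_info[i] = new_vif
            return network_info
    network_info.append(new_vif)
    return network_info

cache = [{"id": "989d73a2-e885-488a-8ba5-f7d5d6067ce5", "active": False}]
update_vif_entry(cache, {"id": "989d73a2-e885-488a-8ba5-f7d5d6067ce5",
                         "active": True})
print(cache)
```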
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1241.559656] env[62740]: DEBUG nova.network.neutron [req-e6efe625-d120-4bd2-abf4-8d6cddc7bb2e req-b5bfd663-0125-4d06-b350-158fcc29aabc service nova] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Updating instance_info_cache with network_info: [{"id": "989d73a2-e885-488a-8ba5-f7d5d6067ce5", "address": "fa:16:3e:be:c0:be", "network": {"id": "0e6f870d-2dd5-482c-9f98-afc887c88d62", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-437309259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "638caa0c37e04221afe8f6f207d1d730", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e028024-a9c1-4cae-8849-ea770a7ae0e4", "external-id": "nsx-vlan-transportzone-919", "segmentation_id": 919, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap989d73a2-e8", "ovs_interfaceid": "989d73a2-e885-488a-8ba5-f7d5d6067ce5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1241.560149] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-e6efe625-d120-4bd2-abf4-8d6cddc7bb2e req-b5bfd663-0125-4d06-b350-158fcc29aabc service nova] Expecting reply to msg fde5762eb68f4057a7f56610df44df94 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1241.570052] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fde5762eb68f4057a7f56610df44df94 [ 1241.570699] env[62740]: DEBUG oslo_concurrency.lockutils [req-e6efe625-d120-4bd2-abf4-8d6cddc7bb2e req-b5bfd663-0125-4d06-b350-158fcc29aabc service nova] Releasing lock "refresh_cache-7aacf4e0-b508-4a18-909a-3d1fe9458d98" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1242.891645] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-93ff8229-f9ea-4aea-bc82-c537f1732c76 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Expecting reply to msg ce9744f671324ddeb6dbc2f7203f08c6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1242.908280] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ce9744f671324ddeb6dbc2f7203f08c6 [ 1242.909131] env[62740]: DEBUG oslo_concurrency.lockutils [None req-93ff8229-f9ea-4aea-bc82-c537f1732c76 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Acquiring lock "eba85edb-4d86-42c9-8b49-98f2173a3eeb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1244.437153] env[62740]: DEBUG oslo_concurrency.lockutils [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Acquiring lock "388d71f2-b229-4666-a53d-d5b07e498eed" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1244.437412] env[62740]: DEBUG oslo_concurrency.lockutils [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Lock "388d71f2-b229-4666-a53d-d5b07e498eed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1248.891735] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1249.887030] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1250.182258] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 137c364d31854a6284a733b240d72b0f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1250.191920] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 137c364d31854a6284a733b240d72b0f [ 1250.890746] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1251.891256] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1252.477189] env[62740]: DEBUG oslo_concurrency.lockutils [None req-195f8702-2a8b-4af1-beac-c4bffc17d311 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Acquiring lock "58ae8579-4ea3-45ba-a982-10e0ca82874c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1252.477799] env[62740]: DEBUG oslo_concurrency.lockutils [None req-195f8702-2a8b-4af1-beac-c4bffc17d311 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Lock "58ae8579-4ea3-45ba-a982-10e0ca82874c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1252.889972] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1252.890191] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None 
None] Starting heal instance info cache {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 1252.890318] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Rebuilding the list of instances to heal {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 1252.890933] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg a26f1c686b1e48a5b374eb9bd68b142d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1252.908374] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a26f1c686b1e48a5b374eb9bd68b142d [ 1252.910691] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 5f57389d-853e-4439-872a-8345664578d0] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1252.910840] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1252.910973] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1252.911113] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1252.911238] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 158406db-7196-4826-aefa-20a58daa186b] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1252.911359] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1252.911479] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1252.911597] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1252.911712] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Skipping network cache update for instance because it is Building. 
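_heal_instance_info_cache rebuilds its candidate list each pass and skips anything still building, hence the run of "Skipping network cache update" lines. Sketched as a filter (field names illustrative):

```python
def instances_to_heal(instances):
    """Yield instances whose network info cache is worth refreshing;
    instances still building are skipped, as in the log."""
    for inst in instances:
        if inst["vm_state"] == "building":
            print("Skipping %s: still building" % inst["uuid"])
            continue
        yield inst

insts = [{"uuid": "7aacf4e0", "vm_state": "building"},
         {"uuid": "5f57389d", "vm_state": "active"}]
print([i["uuid"] for i in instances_to_heal(insts)])
```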
{{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1252.911828] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1252.911945] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Didn't find any instances for network info cache update. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 1252.912410] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1252.912591] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager.update_available_resource {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1252.912957] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg db20446d32c140008d646f994596bc58 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1252.922751] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg db20446d32c140008d646f994596bc58 [ 1252.923611] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1252.923820] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1252.923983] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1252.924148] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62740) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1252.925257] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f40d630d-2edd-4535-a169-fe5dee155571 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.933884] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2d5c5e3-1edd-459e-9705-c93b79f2c8ef {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.948592] env[62740]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60c13b1c-20db-48d5-a78c-71e95b4636de {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.954639] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4b2ff4f-44d0-4143-a24c-4d2bded29b3e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.982142] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181569MB free_disk=90GB free_vcpus=48 pci_devices=None {{(pid=62740) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1252.982290] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1252.982483] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1252.983386] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg f51da63b048c4c7dbbfbedbf5de6bdb3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1253.018270] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f51da63b048c4c7dbbfbedbf5de6bdb3 [ 1253.022290] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 6a6e5b9463b04843aa409be46a17144a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1253.031514] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6a6e5b9463b04843aa409be46a17144a [ 1253.052287] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 5f57389d-853e-4439-872a-8345664578d0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1253.052438] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 472cd209-4192-4473-b788-d1ea342653bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1253.052570] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance d8dac9af-0897-4fbf-8ee6-1fb3955d48c0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1253.052693] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance d6c3ca16-5c7c-41e6-9850-10221603ad2a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1253.052815] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 158406db-7196-4826-aefa-20a58daa186b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1253.052930] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance b0b16f66-8dbc-4e9b-a932-5de45215cfff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1253.053057] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 56106517-e735-4bf5-8d5a-dc0d4aab3991 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1253.053176] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 43e4ddf4-230e-49f7-975f-ba99a6da9398 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1253.053294] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance eba85edb-4d86-42c9-8b49-98f2173a3eeb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1253.053411] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 7aacf4e0-b508-4a18-909a-3d1fe9458d98 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1253.053943] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 003070ee044846b99dbec7b8d575427e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1253.063454] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 003070ee044846b99dbec7b8d575427e [ 1253.064237] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 6005c9dc-3067-4719-a8f9-befb63f7cd8d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1253.064728] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 8d8a5b17a5fc47bfbba3078f13b462b4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1253.074040] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8d8a5b17a5fc47bfbba3078f13b462b4 [ 1253.074678] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance ba23ede2-be42-48ac-b281-571ccd158dee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1253.075193] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg a5390e6abfb941a6a420815c216ea24a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1253.084061] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a5390e6abfb941a6a420815c216ea24a [ 1253.084696] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1253.085172] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 8f11df6ad887466db6cddbad438ba314 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1253.093821] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f11df6ad887466db6cddbad438ba314 [ 1253.094439] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 2deff09f-d24f-4609-91f2-1585e8407c2a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1253.094879] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg c028e791f6fc4f6ebe754838d52ccfc3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1253.103403] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c028e791f6fc4f6ebe754838d52ccfc3 [ 1253.103987] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 3f36f081-2851-4339-860d-0a302ef4ee2c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1253.104421] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 81d28cf9895044faa7c871382b492e96 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1253.112955] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 81d28cf9895044faa7c871382b492e96 [ 1253.113541] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 913ddb91-9d46-459e-8775-c9f380ed3cc4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1253.114103] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 01e6836cdd0f41fdaeab1efddca55706 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1253.122040] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 01e6836cdd0f41fdaeab1efddca55706 [ 1253.122597] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 07efd13e-40d0-4158-b17c-6f5c75474ce3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1253.123015] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 2bbda5cf564d447ca243f5c652916cef in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1253.131273] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2bbda5cf564d447ca243f5c652916cef [ 1253.131848] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 8a156903-e4cf-43ed-9c6a-962a06ff9ef4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
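The audit walks every placement allocation against this node and sorts consumers three ways: instances actively managed here keep their allocations, instances scheduled but not yet started are skipped, and anything else becomes a cleanup candidate. A compact sketch of that triage (data shapes simplified):

```python
def triage_allocations(allocations, managed, scheduled):
    """Split placement allocations the way the resource tracker logs them."""
    for uuid in allocations:
        if uuid in managed:
            print("%s: actively managed, allocation kept" % uuid)
        elif uuid in scheduled:
            print("%s: scheduled but not started, skipping heal" % uuid)
        else:
            print("%s: unknown consumer, candidate for cleanup" % uuid)

triage_allocations(
    {"7aacf4e0": {}, "6005c9dc": {}, "dead-beef": {}},
    managed={"7aacf4e0"},
    scheduled={"6005c9dc"},
)
```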
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1253.132261] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 8db32ab0cb794cada48d28e6a37eb48c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1253.141037] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8db32ab0cb794cada48d28e6a37eb48c [ 1253.141594] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 388d71f2-b229-4666-a53d-d5b07e498eed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1253.141993] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg c6d328dd374f4b31a1356ad042e2ff00 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1253.150973] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c6d328dd374f4b31a1356ad042e2ff00 [ 1253.151526] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 58ae8579-4ea3-45ba-a982-10e0ca82874c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1253.151744] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1253.151889] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1253.369960] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d43bbdbd-bc11-4352-96fc-8f8b3c4d3580 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.377468] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14c0faed-4302-4932-963a-7b72c3ac9f08 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.408439] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb143d13-6649-46a0-b277-5329c3040f14 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.415990] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd142268-6fbf-4a7a-a361-db44055babdf {{(pid=62740) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.428928] env[62740]: DEBUG nova.compute.provider_tree [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1253.429410] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 155a3fa1f27843c5a7019eb3dd4867c4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1253.436358] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 155a3fa1f27843c5a7019eb3dd4867c4 [ 1253.437247] env[62740]: DEBUG nova.scheduler.client.report [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1253.439674] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg facf5c45f1674305b814bb5e7ee9e7ee in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1253.450771] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg facf5c45f1674305b814bb5e7ee9e7ee [ 1253.451403] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62740) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1253.451588] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.469s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1254.429345] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1254.890675] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1254.891847] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62740) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 1256.887019] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1256.887445] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 9ecee4cf44624495a51542bc405e50f4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1256.906873] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ecee4cf44624495a51542bc405e50f4 [ 1257.237852] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-754f5765-fbfb-4df7-86b1-0454a446d0dc tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Expecting reply to msg 1186ef497b214ee99b8e2658dc82f50d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1257.249250] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1186ef497b214ee99b8e2658dc82f50d [ 1257.249794] env[62740]: DEBUG oslo_concurrency.lockutils [None req-754f5765-fbfb-4df7-86b1-0454a446d0dc tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Acquiring lock "7aacf4e0-b508-4a18-909a-3d1fe9458d98" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1266.293055] env[62740]: WARNING oslo_vmware.rw_handles [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1266.293055] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1266.293055] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1266.293055] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1266.293055] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1266.293055] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 1266.293055] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1266.293055] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1266.293055] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1266.293055] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1266.293055] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1266.293055] env[62740]: ERROR oslo_vmware.rw_handles [ 1266.293055] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to 
vmware_temp/7cc9fcd2-21a4-43f9-b555-6bd80fa2522a/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1266.295323] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1266.295571] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Copying Virtual Disk [datastore2] vmware_temp/7cc9fcd2-21a4-43f9-b555-6bd80fa2522a/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore2] vmware_temp/7cc9fcd2-21a4-43f9-b555-6bd80fa2522a/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1266.295876] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6fe38e5a-e04c-4072-bc12-162c8d8efd6e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.307597] env[62740]: DEBUG oslo_vmware.api [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Waiting for the task: (returnval){ [ 1266.307597] env[62740]: value = "task-640243" [ 1266.307597] env[62740]: _type = "Task" [ 1266.307597] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.315164] env[62740]: DEBUG oslo_vmware.api [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Task: {'id': task-640243, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.818897] env[62740]: DEBUG oslo_vmware.exceptions [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Fault InvalidArgument not matched. 
{{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1266.819279] env[62740]: DEBUG oslo_concurrency.lockutils [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1266.819974] env[62740]: ERROR nova.compute.manager [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1266.819974] env[62740]: Faults: ['InvalidArgument'] [ 1266.819974] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Traceback (most recent call last): [ 1266.819974] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1266.819974] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] yield resources [ 1266.819974] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1266.819974] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] self.driver.spawn(context, instance, image_meta, [ 1266.819974] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1266.819974] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1266.819974] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1266.819974] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] self._fetch_image_if_missing(context, vi) [ 1266.819974] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1266.820327] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] image_cache(vi, tmp_image_ds_loc) [ 1266.820327] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1266.820327] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] vm_util.copy_virtual_disk( [ 1266.820327] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1266.820327] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] session._wait_for_task(vmdk_copy_task) [ 1266.820327] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1266.820327] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] return self.wait_for_task(task_ref) [ 1266.820327] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1266.820327] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] return evt.wait() [ 1266.820327] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1266.820327] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] result = hub.switch() [ 1266.820327] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1266.820327] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] return self.greenlet.switch() [ 1266.820657] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1266.820657] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] self.f(*self.args, **self.kw) [ 1266.820657] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1266.820657] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] raise exceptions.translate_fault(task_info.error) [ 1266.820657] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1266.820657] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Faults: ['InvalidArgument'] [ 1266.820657] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] [ 1266.820657] env[62740]: INFO nova.compute.manager [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Terminating instance [ 1266.821881] env[62740]: DEBUG oslo_concurrency.lockutils [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1266.822103] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1266.822347] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-885798ae-0800-4175-a283-32887dcb1299 
{{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.824513] env[62740]: DEBUG nova.compute.manager [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1266.824711] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1266.825444] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49b5d0ef-15e7-42c0-841b-d7ed282b00c3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.832339] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1266.832562] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a99866ff-63ac-4cd7-b7bb-4b6d46d8df63 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.834728] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1266.834902] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1266.835880] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f02e7857-6319-4228-b0d5-685ccc4e7bae {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.840456] env[62740]: DEBUG oslo_vmware.api [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Waiting for the task: (returnval){ [ 1266.840456] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52792410-9104-eb28-3465-b7b6d7225427" [ 1266.840456] env[62740]: _type = "Task" [ 1266.840456] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.847595] env[62740]: DEBUG oslo_vmware.api [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52792410-9104-eb28-3465-b7b6d7225427, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.902417] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1266.902852] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1266.903074] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Deleting the datastore file [datastore2] b0b16f66-8dbc-4e9b-a932-5de45215cfff {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1266.903347] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2ca92711-9546-4fca-9073-aa201b00b719 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.910028] env[62740]: DEBUG oslo_vmware.api [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Waiting for the task: (returnval){ [ 1266.910028] env[62740]: value = "task-640245" [ 1266.910028] env[62740]: _type = "Task" [ 1266.910028] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.917598] env[62740]: DEBUG oslo_vmware.api [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Task: {'id': task-640245, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.350490] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1267.350776] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Creating directory with path [datastore2] vmware_temp/29a34274-7c9e-428f-896e-75961cb931c4/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1267.351507] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f93128ec-896c-49d8-8d66-086554c1c24d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.363698] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Created directory with path [datastore2] vmware_temp/29a34274-7c9e-428f-896e-75961cb931c4/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1267.363904] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Fetch image to [datastore2] vmware_temp/29a34274-7c9e-428f-896e-75961cb931c4/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1267.364092] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/29a34274-7c9e-428f-896e-75961cb931c4/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1267.364837] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a474432e-f92a-4f42-8a71-afa8f3c6ce5f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.371599] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f40445f5-9e34-4a05-a38a-02a81a7cc6c0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.380365] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47df4852-1339-4f53-a2b3-daca4e24590f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.409251] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccda02ac-fea0-48ae-8194-f7771e694921 
{{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.420395] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7d275999-fd1e-4b74-894c-a69544ec3063 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.422032] env[62740]: DEBUG oslo_vmware.api [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Task: {'id': task-640245, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080774} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1267.422272] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1267.422451] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1267.422623] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1267.422794] env[62740]: INFO nova.compute.manager [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Took 0.60 seconds to destroy the instance on the hypervisor. 
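The "Waiting for the task ... progress is 0% ... completed successfully" entries above are produced by oslo.vmware's task polling loop: a vCenter task is submitted, then its TaskInfo is re-read until it reaches a terminal state. A minimal, self-contained sketch of that pattern follows; get_task_info is a hypothetical stand-in for fetching the real VIM TaskInfo object, so this illustrates the loop, not the actual oslo.vmware implementation.

    import time

    class TaskFault(Exception):
        """Stands in for the translated VIM fault (e.g. VimFaultException)."""

    def wait_for_task(get_task_info, poll_interval=0.5):
        # Poll a vCenter-style task until it reaches a terminal state.
        while True:
            info = get_task_info()
            if info.state == "success":
                return info
            if info.state == "error":
                # the real code first tries to match the fault to a typed
                # exception class (the "Fault InvalidArgument not matched"
                # entries above come from that classification step)
                raise TaskFault(info.error)
            # queued/running: report progress and poll again
            print(f"Task progress is {info.progress}%.")
            time.sleep(poll_interval)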
[ 1267.424809] env[62740]: DEBUG nova.compute.claims [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1267.424985] env[62740]: DEBUG oslo_concurrency.lockutils [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1267.425211] env[62740]: DEBUG oslo_concurrency.lockutils [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1267.427558] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg bebbe41fa7a64463ab3fedf7e58f3f8a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1267.442540] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1267.466382] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bebbe41fa7a64463ab3fedf7e58f3f8a [ 1267.498021] env[62740]: DEBUG oslo_vmware.rw_handles [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/29a34274-7c9e-428f-896e-75961cb931c4/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1267.560493] env[62740]: DEBUG oslo_vmware.rw_handles [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Completed reading data from the image iterator. {{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1267.560703] env[62740]: DEBUG oslo_vmware.rw_handles [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/29a34274-7c9e-428f-896e-75961cb931c4/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1267.765292] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c901ec9-cc17-4550-a1df-a32ba676b43d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.773848] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bacba44f-abf2-4d26-a6d8-f993eb96ab2d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.803357] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa79abc4-0983-479b-b47e-7b2afc768871 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.810172] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ed679b-2bf0-4324-ad09-7ed85ebc5abf {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.822637] env[62740]: DEBUG nova.compute.provider_tree [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1267.823169] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 6a6bc10cacf74ed790fb57faa0a82980 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1267.831416] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6a6bc10cacf74ed790fb57faa0a82980 [ 1267.832324] env[62740]: DEBUG nova.scheduler.client.report [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1267.834574] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 3e199d97286d4a369ba0a4825261f242 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1267.846697] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e199d97286d4a369ba0a4825261f242 [ 1267.847408] env[62740]: DEBUG oslo_concurrency.lockutils [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.422s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1267.847921] env[62740]: ERROR nova.compute.manager [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1267.847921] env[62740]: Faults: ['InvalidArgument'] [ 1267.847921] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Traceback (most recent call last): [ 1267.847921] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1267.847921] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] self.driver.spawn(context, instance, image_meta, [ 1267.847921] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1267.847921] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1267.847921] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1267.847921] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] self._fetch_image_if_missing(context, vi) [ 1267.847921] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1267.847921] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] image_cache(vi, tmp_image_ds_loc) [ 1267.847921] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1267.848250] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] vm_util.copy_virtual_disk( [ 1267.848250] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1267.848250] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] session._wait_for_task(vmdk_copy_task) [ 1267.848250] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1267.848250] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] return self.wait_for_task(task_ref) [ 1267.848250] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1267.848250] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] return evt.wait() [ 1267.848250] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1267.848250] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] result = hub.switch() [ 1267.848250] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1267.848250] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] return self.greenlet.switch() [ 1267.848250] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1267.848250] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] self.f(*self.args, **self.kw) [ 1267.848602] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1267.848602] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] raise exceptions.translate_fault(task_info.error) [ 1267.848602] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1267.848602] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Faults: ['InvalidArgument'] [ 1267.848602] env[62740]: ERROR nova.compute.manager [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] [ 1267.848735] env[62740]: DEBUG nova.compute.utils [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1267.850411] env[62740]: DEBUG nova.compute.manager [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Build of instance b0b16f66-8dbc-4e9b-a932-5de45215cfff was re-scheduled: A specified parameter was not correct: fileType [ 1267.850411] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1267.850795] env[62740]: DEBUG nova.compute.manager [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1267.850966] env[62740]: DEBUG nova.compute.manager [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1267.851859] env[62740]: DEBUG nova.compute.manager [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1267.851859] env[62740]: DEBUG nova.network.neutron [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1268.383308] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg f67761c6dced45eda60d577441b911ce in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1268.393726] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f67761c6dced45eda60d577441b911ce [ 1268.394351] env[62740]: DEBUG nova.network.neutron [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1268.394864] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 68213ec440114cb4984c65fb5fcb7fd9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1268.408161] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 68213ec440114cb4984c65fb5fcb7fd9 [ 1268.408756] env[62740]: INFO nova.compute.manager [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Took 0.56 seconds to deallocate network for instance. 
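The block above shows the failure path after the disk-copy task raised InvalidArgument: the compute manager aborts the resource claim, records that the build was re-scheduled, and deallocates the instance's networking. A condensed sketch of that ordering, with simplified names (an illustration of the sequence visible in the log, not Nova's actual code):

    class RescheduledException(Exception):
        """Signals the conductor to retry the build on another host."""

    def build_and_run(context, instance, driver, claim, network_api):
        try:
            driver.spawn(context, instance)
        except Exception as exc:
            # free the claim first so this node's tracked usage is
            # corrected before anything else happens...
            claim.abort()
            # ...then drop any networking allocated for the build
            network_api.deallocate_for_instance(context, instance)
            raise RescheduledException(
                f"Build of instance {instance.uuid} was re-scheduled: {exc}")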
[ 1268.410685] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg d1dcbf5f32ba4311a6106952de464043 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1268.448827] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d1dcbf5f32ba4311a6106952de464043 [ 1268.451962] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 187f36ef167d4e19b1903f4da4bfa368 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1268.487380] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 187f36ef167d4e19b1903f4da4bfa368 [ 1268.514491] env[62740]: INFO nova.scheduler.client.report [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Deleted allocations for instance b0b16f66-8dbc-4e9b-a932-5de45215cfff [ 1268.521025] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg f1107e6c0fa1468aaa73a5c63abe2efd in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1268.536803] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f1107e6c0fa1468aaa73a5c63abe2efd [ 1268.537600] env[62740]: DEBUG oslo_concurrency.lockutils [None req-4200f31e-1342-4948-9e07-ca771566351c tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Lock "b0b16f66-8dbc-4e9b-a932-5de45215cfff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 273.099s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1268.538323] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg 4084f5022a654fcbbc10a46403e1c44c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1268.538732] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "b0b16f66-8dbc-4e9b-a932-5de45215cfff" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 86.279s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1268.538939] env[62740]: INFO nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] During sync_power_state the instance has a pending task (spawning). Skip. 
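The "waited 86.279s" and "waited 77.083s" entries above show per-instance serialization: power-state sync and terminate both take a lock named after the instance UUID and queue behind the long-running build. A minimal sketch of that pattern with oslo.concurrency (illustrative only; the lock name and body are simplified):

    from oslo_concurrency import lockutils

    def do_terminate_instance(instance_uuid):
        # every operation on this instance synchronizes on the same
        # name, so build, terminate and _sync_power_states for one
        # UUID run strictly one after another
        @lockutils.synchronized(instance_uuid)
        def _locked():
            print(f"terminating {instance_uuid}")

        _locked()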
[ 1268.539142] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "b0b16f66-8dbc-4e9b-a932-5de45215cfff" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1268.539839] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6c03c523-8e4c-4516-8e5c-a6507b2f73a1 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Lock "b0b16f66-8dbc-4e9b-a932-5de45215cfff" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 77.083s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1268.540109] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6c03c523-8e4c-4516-8e5c-a6507b2f73a1 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Acquiring lock "b0b16f66-8dbc-4e9b-a932-5de45215cfff-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1268.540349] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6c03c523-8e4c-4516-8e5c-a6507b2f73a1 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Lock "b0b16f66-8dbc-4e9b-a932-5de45215cfff-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1268.540642] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6c03c523-8e4c-4516-8e5c-a6507b2f73a1 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Lock "b0b16f66-8dbc-4e9b-a932-5de45215cfff-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1268.542378] env[62740]: INFO nova.compute.manager [None req-6c03c523-8e4c-4516-8e5c-a6507b2f73a1 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Terminating instance [ 1268.544283] env[62740]: DEBUG nova.compute.manager [None req-6c03c523-8e4c-4516-8e5c-a6507b2f73a1 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Start destroying the instance on the hypervisor. 
{{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1268.544476] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6c03c523-8e4c-4516-8e5c-a6507b2f73a1 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1268.544744] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5cd60fe5-d992-4749-802d-0da381345e82 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.549148] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4084f5022a654fcbbc10a46403e1c44c [ 1268.549655] env[62740]: DEBUG nova.compute.manager [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1268.551291] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg 0a845e11efc943b3802501f6c2ab41e2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1268.556108] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d453c0a4-f8e2-4b4e-83a3-9a5fe73102c9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.584630] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-6c03c523-8e4c-4516-8e5c-a6507b2f73a1 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b0b16f66-8dbc-4e9b-a932-5de45215cfff could not be found. [ 1268.584714] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6c03c523-8e4c-4516-8e5c-a6507b2f73a1 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1268.584921] env[62740]: INFO nova.compute.manager [None req-6c03c523-8e4c-4516-8e5c-a6507b2f73a1 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1268.585612] env[62740]: DEBUG oslo.service.loopingcall [None req-6c03c523-8e4c-4516-8e5c-a6507b2f73a1 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1268.585924] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0a845e11efc943b3802501f6c2ab41e2 [ 1268.586271] env[62740]: DEBUG nova.compute.manager [-] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1268.586390] env[62740]: DEBUG nova.network.neutron [-] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1268.602711] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1268.602953] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1268.604354] env[62740]: INFO nova.compute.claims [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1268.605871] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg ef120726150a4189b06fcbf744604d67 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1268.622444] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg fe69c1ec9ca7476c871067c7a5587ae0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1268.628530] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fe69c1ec9ca7476c871067c7a5587ae0 [ 1268.628853] env[62740]: DEBUG nova.network.neutron [-] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1268.629210] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg caef6ac152864e288edb32a98cdd86d4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1268.639049] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg caef6ac152864e288edb32a98cdd86d4 [ 1268.639049] env[62740]: INFO nova.compute.manager [-] [instance: b0b16f66-8dbc-4e9b-a932-5de45215cfff] Took 0.05 seconds to deallocate network for instance. 
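The "Waiting for function ... _deallocate_network_with_retries to return" entry above is oslo.service's looping-call machinery: a callable is re-invoked on an interval until it signals completion by raising LoopingCallDone, whose return value is handed back to the waiter. A small runnable sketch of that contract (the retry policy here is simplified and not Nova's):

    from oslo_service import loopingcall

    def deallocate_with_retries(deallocate, max_attempts=3):
        attempts = {"count": 0}

        def _try_once():
            attempts["count"] += 1
            try:
                deallocate()
            except Exception:
                if attempts["count"] >= max_attempts:
                    raise  # propagate; .wait() below re-raises it
                return  # run again on the next interval
            # success: stop the loop and return a value to .wait()
            raise loopingcall.LoopingCallDone(retvalue=True)

        timer = loopingcall.FixedIntervalLoopingCall(_try_once)
        return timer.start(interval=1.0).wait()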
[ 1268.642143] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6c03c523-8e4c-4516-8e5c-a6507b2f73a1 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 4ec37f048d9a41c0a9f74e843e976d88 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1268.643192] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ef120726150a4189b06fcbf744604d67 [ 1268.644833] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg 93162a8eb7d74e42a4f4a62c8f1f92e8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1268.653750] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 93162a8eb7d74e42a4f4a62c8f1f92e8 [ 1268.668392] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4ec37f048d9a41c0a9f74e843e976d88 [ 1268.682958] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6c03c523-8e4c-4516-8e5c-a6507b2f73a1 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 01530b5b0bd14c56b21fc06665b95ea0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1268.733403] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 01530b5b0bd14c56b21fc06665b95ea0 [ 1268.735113] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6c03c523-8e4c-4516-8e5c-a6507b2f73a1 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Lock "b0b16f66-8dbc-4e9b-a932-5de45215cfff" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.194s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1268.735113] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6c03c523-8e4c-4516-8e5c-a6507b2f73a1 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 39a5c27075f549898efd5f7a542715be in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1268.744667] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 39a5c27075f549898efd5f7a542715be [ 1268.877571] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf1b3157-8357-458b-be10-4184135b4ddc {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.885415] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36b54ddc-d3be-45a0-b761-00df2d7d6002 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.914915] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9ade8fb-f9c1-4680-917a-289c2c6725ec {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.921684] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-456c976c-e8e9-41b9-b461-1ca7db216f50 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.934242] env[62740]: DEBUG 
nova.compute.provider_tree [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1268.934739] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg b02e700b3cab4c608d8a053510090aec in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1268.942209] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b02e700b3cab4c608d8a053510090aec [ 1268.943096] env[62740]: DEBUG nova.scheduler.client.report [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1268.945334] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg 2e552d8126ef454aa61f2eaed56d81ed in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1268.955880] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e552d8126ef454aa61f2eaed56d81ed [ 1268.956622] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.354s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1268.957101] env[62740]: DEBUG nova.compute.manager [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Start building networks asynchronously for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1268.958694] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg d8870d8059d749658b90a29fa9cba91d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1268.987894] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d8870d8059d749658b90a29fa9cba91d [ 1268.989577] env[62740]: DEBUG nova.compute.utils [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1268.990271] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg 65b6f99ecb30435ab55d0e6d8250d09c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1268.991270] env[62740]: DEBUG nova.compute.manager [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Not allocating networking since 'none' was specified. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 1268.998884] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 65b6f99ecb30435ab55d0e6d8250d09c [ 1268.999447] env[62740]: DEBUG nova.compute.manager [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Start building block device mappings for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1269.001058] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg cc0886783f554f6cb0e7b5d7e78a708b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1269.029096] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cc0886783f554f6cb0e7b5d7e78a708b [ 1269.031783] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg 7643e568636e474f9ba0574cfffd8a0b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1269.061482] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7643e568636e474f9ba0574cfffd8a0b [ 1269.062783] env[62740]: DEBUG nova.compute.manager [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Start spawning the instance on the hypervisor. 
{{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1269.088391] env[62740]: DEBUG nova.virt.hardware [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1269.088748] env[62740]: DEBUG nova.virt.hardware [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1269.088923] env[62740]: DEBUG nova.virt.hardware [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1269.089127] env[62740]: DEBUG nova.virt.hardware [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1269.089280] env[62740]: DEBUG nova.virt.hardware [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1269.089439] env[62740]: DEBUG nova.virt.hardware [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1269.089710] env[62740]: DEBUG nova.virt.hardware [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1269.089882] env[62740]: DEBUG nova.virt.hardware [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1269.090068] env[62740]: DEBUG nova.virt.hardware [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 
tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1269.090241] env[62740]: DEBUG nova.virt.hardware [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1269.090417] env[62740]: DEBUG nova.virt.hardware [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1269.091285] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c306649f-d12a-4f38-9390-7aac4432492e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.099891] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1baed3fb-afb5-41af-80e1-673144da6244 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.112849] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Instance VIF info [] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1269.118353] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Creating folder: Project (d5e8303d419f47d293c919834a980fa1). Parent ref: group-v156037. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1269.118631] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9008c3c8-e282-4c29-b56c-040fde30b063 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.127755] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Created folder: Project (d5e8303d419f47d293c919834a980fa1) in parent group-v156037. [ 1269.127955] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Creating folder: Instances. Parent ref: group-v156145. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1269.128181] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-929aa66e-e4b4-4c24-a307-0caba94b7927 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.135395] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Created folder: Instances in parent group-v156145. 
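The hardware records above reduce a 1-vCPU flavor with wide-open limits (65536 sockets/cores/threads) to the single topology 1:1:1. A worked sketch of that enumeration, assuming nothing beyond the numbers in the log; it illustrates the factorisation step and is not nova.virt.hardware's actual implementation:

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Enumerate every (sockets, cores, threads) factorisation of the
        # vCPU count that respects the per-dimension limits.
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    topologies.append((sockets, cores, threads))
        return topologies

    print(possible_topologies(1))  # [(1, 1, 1)] -- "Got 1 possible topologies"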
[ 1269.135615] env[62740]: DEBUG oslo.service.loopingcall [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1269.135817] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1269.135963] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f7df78ca-4361-4f89-9d17-7d82a821fe0b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.151046] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1269.151046] env[62740]: value = "task-640248" [ 1269.151046] env[62740]: _type = "Task" [ 1269.151046] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.157635] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640248, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.660966] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640248, 'name': CreateVM_Task, 'duration_secs': 0.233966} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1269.661216] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1269.661562] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1269.661726] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1269.662048] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1269.662291] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fdee7352-7a6e-469c-a08b-5ed3697fba93 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.666481] env[62740]: DEBUG oslo_vmware.api [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Waiting for the task: 
(returnval){ [ 1269.666481] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52e3bb57-fdc1-6dfa-f2dc-c3cd0945e8a1" [ 1269.666481] env[62740]: _type = "Task" [ 1269.666481] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.674611] env[62740]: DEBUG oslo_vmware.api [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52e3bb57-fdc1-6dfa-f2dc-c3cd0945e8a1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.177303] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1270.177518] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1270.177755] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1271.710263] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Acquiring lock "c0daf074-eecb-4899-938f-477031efc6d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1271.710538] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Lock "c0daf074-eecb-4899-938f-477031efc6d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1281.298488] env[62740]: WARNING oslo_vmware.rw_handles [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1281.298488] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1281.298488] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1281.298488] env[62740]: ERROR oslo_vmware.rw_handles 
self._conn.getresponse() [ 1281.298488] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1281.298488] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 1281.298488] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1281.298488] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1281.298488] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1281.298488] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1281.298488] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1281.298488] env[62740]: ERROR oslo_vmware.rw_handles [ 1281.299118] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/05237dbb-4d83-4ff4-857d-9906a1c55357/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore1 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1281.301067] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1281.301358] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Copying Virtual Disk [datastore1] vmware_temp/05237dbb-4d83-4ff4-857d-9906a1c55357/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore1] vmware_temp/05237dbb-4d83-4ff4-857d-9906a1c55357/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1281.301655] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6c2d3eee-9307-4ec9-9802-4aacb4c07524 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.309177] env[62740]: DEBUG oslo_vmware.api [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Waiting for the task: (returnval){ [ 1281.309177] env[62740]: value = "task-640249" [ 1281.309177] env[62740]: _type = "Task" [ 1281.309177] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.317202] env[62740]: DEBUG oslo_vmware.api [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Task: {'id': task-640249, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.819653] env[62740]: DEBUG oslo_vmware.exceptions [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Fault InvalidArgument not matched. {{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1281.819999] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Releasing lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1281.820604] env[62740]: ERROR nova.compute.manager [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1281.820604] env[62740]: Faults: ['InvalidArgument'] [ 1281.820604] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] Traceback (most recent call last): [ 1281.820604] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1281.820604] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] yield resources [ 1281.820604] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1281.820604] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] self.driver.spawn(context, instance, image_meta, [ 1281.820604] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1281.820604] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1281.820604] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1281.820604] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] self._fetch_image_if_missing(context, vi) [ 1281.820604] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1281.820604] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] image_cache(vi, tmp_image_ds_loc) [ 1281.821012] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1281.821012] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] vm_util.copy_virtual_disk( [ 1281.821012] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 
1281.821012] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] session._wait_for_task(vmdk_copy_task) [ 1281.821012] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1281.821012] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] return self.wait_for_task(task_ref) [ 1281.821012] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1281.821012] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] return evt.wait() [ 1281.821012] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1281.821012] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] result = hub.switch() [ 1281.821012] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1281.821012] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] return self.greenlet.switch() [ 1281.821012] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1281.821362] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] self.f(*self.args, **self.kw) [ 1281.821362] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1281.821362] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] raise exceptions.translate_fault(task_info.error) [ 1281.821362] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1281.821362] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] Faults: ['InvalidArgument'] [ 1281.821362] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] [ 1281.821362] env[62740]: INFO nova.compute.manager [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Terminating instance [ 1281.822746] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Acquired lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1281.822960] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Creating directory with path [datastore1] devstack-image-cache_base 
{{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1281.823207] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d4855c54-4f94-4a76-8f61-137b892c096a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.825505] env[62740]: DEBUG nova.compute.manager [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1281.825701] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1281.826409] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-723ed40d-bc9e-48d4-8e68-8c3f9fa94c36 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.833353] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1281.833555] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fab4b859-9b2d-4fc5-b686-378fce68fbdd {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.835549] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1281.835720] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1281.836672] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ca98d4a-6501-47be-b5e4-42fc218d68ca {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.841062] env[62740]: DEBUG oslo_vmware.api [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Waiting for the task: (returnval){ [ 1281.841062] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]525f2889-e23d-c6d0-88c4-68af2bc432fe" [ 1281.841062] env[62740]: _type = "Task" [ 1281.841062] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.852143] env[62740]: DEBUG oslo_vmware.api [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]525f2889-e23d-c6d0-88c4-68af2bc432fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.943063] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1281.943203] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Deleting contents of the VM from datastore datastore1 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1281.943324] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Deleting the datastore file [datastore1] 5f57389d-853e-4439-872a-8345664578d0 {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1281.943593] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-789ffe10-60ea-4fd7-b1bb-2b74c8afbf15 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.949603] env[62740]: DEBUG oslo_vmware.api [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Waiting for the task: (returnval){ [ 1281.949603] env[62740]: value = "task-640251" [ 1281.949603] env[62740]: _type = "Task" [ 1281.949603] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.957159] env[62740]: DEBUG oslo_vmware.api [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Task: {'id': task-640251, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.351659] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1282.351994] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Creating directory with path [datastore1] vmware_temp/1eb52734-d408-4866-b4e8-a91ca0e75903/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1282.352155] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2e216483-ee6b-4087-a9a9-54cfb41b5558 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.363350] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Created directory with path [datastore1] vmware_temp/1eb52734-d408-4866-b4e8-a91ca0e75903/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1282.363533] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Fetch image to [datastore1] vmware_temp/1eb52734-d408-4866-b4e8-a91ca0e75903/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1282.363791] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore1] vmware_temp/1eb52734-d408-4866-b4e8-a91ca0e75903/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore1 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1282.364406] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7924807c-2460-4905-a2f6-f67fddc4f31c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.370971] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e340d1f-cc2a-4558-a57f-cda091c81907 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.380149] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-521a7b2f-1b47-475c-97dc-2c7275de2746 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.410733] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7efb8f12-743f-4874-9f50-57c4a0265a51 {{(pid=62740) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.416512] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-162d233d-4369-4529-b6cd-25e0ec72d66d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.438289] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore1 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1282.459989] env[62740]: DEBUG oslo_vmware.api [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Task: {'id': task-640251, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081679} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.460245] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1282.460430] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Deleted contents of the VM from datastore datastore1 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1282.460706] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1282.460807] env[62740]: INFO nova.compute.manager [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Took 0.64 seconds to destroy the instance on the hypervisor. 
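Several records above poll vCenter tasks to completion (CreateVM_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task: "progress is 0%" until a terminal state, then duration_secs). The underlying pattern is a fixed-interval poll loop; a minimal sketch, where get_task_info is a hypothetical accessor rather than oslo.vmware's real client call:

    import time

    def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
        # Poll until the task reaches a terminal state, mirroring the
        # _poll_task records above: return the result on success, raise the
        # recorded fault on error.
        while True:
            info = get_task_info(task_ref)
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                raise RuntimeError(info['error'])
            time.sleep(poll_interval)

    states = iter([{'state': 'running'}, {'state': 'success', 'result': 'ok'}])
    print(wait_for_task(lambda ref: next(states), 'task-640251', 0.01))  # ok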
[ 1282.465365] env[62740]: DEBUG nova.compute.claims [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1282.465540] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1282.465751] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1282.467654] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg daf941ec83f7442980a0749f43e6d261 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1282.493338] env[62740]: DEBUG oslo_vmware.rw_handles [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1eb52734-d408-4866-b4e8-a91ca0e75903/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1282.549301] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg daf941ec83f7442980a0749f43e6d261 [ 1282.554631] env[62740]: DEBUG oslo_vmware.rw_handles [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Completed reading data from the image iterator. {{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1282.554851] env[62740]: DEBUG oslo_vmware.rw_handles [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1eb52734-d408-4866-b4e8-a91ca0e75903/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1282.768616] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0713cbde-073c-4f67-a012-66311dd7f238 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.776443] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2d8948a-31fc-459c-9a95-7c68477577a3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.805520] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cf41328-56ab-416c-a0f7-02717489bf8b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.812118] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2068aa48-4943-4df5-a483-55ba606d89c4 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.826055] env[62740]: DEBUG nova.compute.provider_tree [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1282.826591] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg fe5967d582e24e7796584635b2eece67 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1282.833625] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fe5967d582e24e7796584635b2eece67 [ 1282.834494] env[62740]: DEBUG nova.scheduler.client.report [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1282.836725] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg e906414672f740eb9627c4431169a980 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1282.849777] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e906414672f740eb9627c4431169a980 [ 1282.850719] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.385s {{(pid=62740) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1282.851018] env[62740]: ERROR nova.compute.manager [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1282.851018] env[62740]: Faults: ['InvalidArgument'] [ 1282.851018] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] Traceback (most recent call last): [ 1282.851018] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1282.851018] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] self.driver.spawn(context, instance, image_meta, [ 1282.851018] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1282.851018] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1282.851018] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1282.851018] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] self._fetch_image_if_missing(context, vi) [ 1282.851018] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1282.851018] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] image_cache(vi, tmp_image_ds_loc) [ 1282.851018] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1282.851348] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] vm_util.copy_virtual_disk( [ 1282.851348] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1282.851348] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] session._wait_for_task(vmdk_copy_task) [ 1282.851348] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1282.851348] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] return self.wait_for_task(task_ref) [ 1282.851348] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1282.851348] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] return evt.wait() [ 1282.851348] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1282.851348] env[62740]: ERROR nova.compute.manager [instance: 
5f57389d-853e-4439-872a-8345664578d0] result = hub.switch() [ 1282.851348] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1282.851348] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] return self.greenlet.switch() [ 1282.851348] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1282.851348] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] self.f(*self.args, **self.kw) [ 1282.851635] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1282.851635] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] raise exceptions.translate_fault(task_info.error) [ 1282.851635] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1282.851635] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] Faults: ['InvalidArgument'] [ 1282.851635] env[62740]: ERROR nova.compute.manager [instance: 5f57389d-853e-4439-872a-8345664578d0] [ 1282.851745] env[62740]: DEBUG nova.compute.utils [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1282.853085] env[62740]: DEBUG nova.compute.manager [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Build of instance 5f57389d-853e-4439-872a-8345664578d0 was re-scheduled: A specified parameter was not correct: fileType [ 1282.853085] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1282.853464] env[62740]: DEBUG nova.compute.manager [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1282.853639] env[62740]: DEBUG nova.compute.manager [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1282.853812] env[62740]: DEBUG nova.compute.manager [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1282.853977] env[62740]: DEBUG nova.network.neutron [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1283.174912] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg dccfea97b7874c1a897d75623a5a0786 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1283.187746] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dccfea97b7874c1a897d75623a5a0786 [ 1283.188151] env[62740]: DEBUG nova.network.neutron [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1283.189035] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 68fc1a39aec84b55acc2cab29fa6ccc1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1283.205285] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 68fc1a39aec84b55acc2cab29fa6ccc1 [ 1283.205285] env[62740]: INFO nova.compute.manager [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Took 0.35 seconds to deallocate network for instance. 
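The inventory repeated in the records above fixes the capacity Placement schedules against: the effective capacity per resource class is (total - reserved) * allocation_ratio, with max_unit capping any single allocation. A worked check using the exact numbers from the log for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0:

    # Values copied verbatim from the "Inventory has not changed" records.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0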
[ 1283.206685] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 7c11c0781c204850a6b12a3723cf6d19 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1283.239375] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c11c0781c204850a6b12a3723cf6d19 [ 1283.242154] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg b4d747809fab4fa4bba26b953da101e2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1283.273816] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4d747809fab4fa4bba26b953da101e2 [ 1283.302867] env[62740]: INFO nova.scheduler.client.report [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Deleted allocations for instance 5f57389d-853e-4439-872a-8345664578d0 [ 1283.309175] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg fa9792e66af54e14b9b46b6b8776ec87 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1283.326749] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fa9792e66af54e14b9b46b6b8776ec87 [ 1283.327428] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5d0e1452-ad4f-4308-800e-24a4de90b43f tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Lock "5f57389d-853e-4439-872a-8345664578d0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 453.054s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1283.328033] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg 5afa996aed66444bb0f7708be238192c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1283.328958] env[62740]: DEBUG oslo_concurrency.lockutils [None req-4fc6dbfb-a7f5-4377-bd42-860eece46bfc tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Lock "5f57389d-853e-4439-872a-8345664578d0" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 255.966s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1283.329071] env[62740]: DEBUG oslo_concurrency.lockutils [None req-4fc6dbfb-a7f5-4377-bd42-860eece46bfc tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Acquiring lock "5f57389d-853e-4439-872a-8345664578d0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1283.329256] env[62740]: DEBUG oslo_concurrency.lockutils [None req-4fc6dbfb-a7f5-4377-bd42-860eece46bfc tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Lock "5f57389d-853e-4439-872a-8345664578d0-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1283.329438] env[62740]: DEBUG oslo_concurrency.lockutils [None req-4fc6dbfb-a7f5-4377-bd42-860eece46bfc tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Lock "5f57389d-853e-4439-872a-8345664578d0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1283.332184] env[62740]: INFO nova.compute.manager [None req-4fc6dbfb-a7f5-4377-bd42-860eece46bfc tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Terminating instance [ 1283.333998] env[62740]: DEBUG nova.compute.manager [None req-4fc6dbfb-a7f5-4377-bd42-860eece46bfc tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1283.334232] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-4fc6dbfb-a7f5-4377-bd42-860eece46bfc tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1283.334777] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c5207f48-755f-4697-bb65-1f519356880c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.342871] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5afa996aed66444bb0f7708be238192c [ 1283.344766] env[62740]: DEBUG nova.compute.manager [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1283.346646] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg 05def02743cc45489555d803f0711171 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1283.352253] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba0271f4-b36d-44dc-bb6b-13480d8774b2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.381801] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-4fc6dbfb-a7f5-4377-bd42-860eece46bfc tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5f57389d-853e-4439-872a-8345664578d0 could not be found. 
[ 1283.382034] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-4fc6dbfb-a7f5-4377-bd42-860eece46bfc tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1283.382219] env[62740]: INFO nova.compute.manager [None req-4fc6dbfb-a7f5-4377-bd42-860eece46bfc tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 5f57389d-853e-4439-872a-8345664578d0] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1283.382475] env[62740]: DEBUG oslo.service.loopingcall [None req-4fc6dbfb-a7f5-4377-bd42-860eece46bfc tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1283.382722] env[62740]: DEBUG nova.compute.manager [-] [instance: 5f57389d-853e-4439-872a-8345664578d0] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1283.382819] env[62740]: DEBUG nova.network.neutron [-] [instance: 5f57389d-853e-4439-872a-8345664578d0] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1283.400219] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 05def02743cc45489555d803f0711171 [ 1283.417487] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1283.417733] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1283.419163] env[62740]: INFO nova.compute.claims [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1283.420994] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg f1d8630f072749aa9aaafa07ebc3568a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1283.422845] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg fcd0936eec8b4f84872680fb2925a71d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1283.432604] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fcd0936eec8b4f84872680fb2925a71d [ 1283.432907] env[62740]: DEBUG nova.network.neutron [-] [instance: 5f57389d-853e-4439-872a-8345664578d0] Updating instance_info_cache with network_info: [] {{(pid=62740) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1283.433271] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 1f57654b24324bf8bfa65b21f9290edb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1283.471383] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1f57654b24324bf8bfa65b21f9290edb [ 1283.471827] env[62740]: INFO nova.compute.manager [-] [instance: 5f57389d-853e-4439-872a-8345664578d0] Took 0.09 seconds to deallocate network for instance. [ 1283.475701] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-4fc6dbfb-a7f5-4377-bd42-860eece46bfc tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg ec40fbb93811431a98315173456b544b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1283.477937] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f1d8630f072749aa9aaafa07ebc3568a [ 1283.479561] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg 85c65f71e2f54685affa4eae326c0bf8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1283.485333] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 85c65f71e2f54685affa4eae326c0bf8 [ 1283.501368] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ec40fbb93811431a98315173456b544b [ 1283.518930] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-4fc6dbfb-a7f5-4377-bd42-860eece46bfc tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 20a44b6d69664e4fb409276daddbdd09 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1283.557031] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 20a44b6d69664e4fb409276daddbdd09 [ 1283.559702] env[62740]: DEBUG oslo_concurrency.lockutils [None req-4fc6dbfb-a7f5-4377-bd42-860eece46bfc tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Lock "5f57389d-853e-4439-872a-8345664578d0" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.231s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1283.559991] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-4fc6dbfb-a7f5-4377-bd42-860eece46bfc tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 7b91c75d51234e5ca0bc74cb588ab10f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1283.560649] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "5f57389d-853e-4439-872a-8345664578d0" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 101.302s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1283.560836] env[62740]: INFO nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 5f57389d-853e-4439-872a-8345664578d0] During sync_power_state the instance has a pending task (deleting). Skip. 
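The pair of entries above shows the guard in the periodic power-state audit: query_driver_power_state_and_sync takes the per-instance lock, but if the instance record still carries a pending task_state (here 'deleting') it skips the sync rather than fight the in-flight operation. A small sketch of that guard, using a hypothetical Instance dataclass to mirror the behaviour logged above:

# Sketch only: a hypothetical Instance record illustrating the
# 'pending task (deleting). Skip.' guard seen in the log above.
from dataclasses import dataclass

@dataclass
class Instance:
    uuid: str
    task_state: str | None      # e.g. 'deleting' while a delete is in flight
    power_state: str

def query_driver_power_state_and_sync(instance, driver_power_state):
    if instance.task_state is not None:
        # An operation is mid-flight; syncing now could clobber its result.
        print(f'During sync_power_state the instance has a pending task '
              f'({instance.task_state}). Skip.')
        return
    if instance.power_state != driver_power_state:
        instance.power_state = driver_power_state   # heal the stale record

inst = Instance('5f57389d-853e-4439-872a-8345664578d0', 'deleting', 'running')
query_driver_power_state_and_sync(inst, 'shutdown')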
[ 1283.561017] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "5f57389d-853e-4439-872a-8345664578d0" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1283.570119] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7b91c75d51234e5ca0bc74cb588ab10f [ 1283.702142] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8016c3a1-43ed-4032-9ad1-dab0c9e4bf1e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.709874] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8622ae3-cda7-4f67-b57c-7ad80b18441d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.738069] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73f94d76-7e0e-48bd-bd57-9ee8d4673cdf {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.744530] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fef3518-47f6-4622-8092-f45f72bdc4d3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.756861] env[62740]: DEBUG nova.compute.provider_tree [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1283.757367] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg 1f97727cb31f42778245daa57d614eb2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1283.764863] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1f97727cb31f42778245daa57d614eb2 [ 1283.765735] env[62740]: DEBUG nova.scheduler.client.report [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1283.767959] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg 3843c38e89fc4667812146549d91ea6b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1283.779606] env[62740]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3843c38e89fc4667812146549d91ea6b [ 1283.780300] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.363s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1283.780755] env[62740]: DEBUG nova.compute.manager [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Start building networks asynchronously for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1283.782359] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg ce9a0a183f8048f48be0bf9267118453 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1283.812384] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ce9a0a183f8048f48be0bf9267118453 [ 1283.813942] env[62740]: DEBUG nova.compute.utils [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1283.814549] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg 4c770de218344c21b8c9d58ff8f73343 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1283.816121] env[62740]: DEBUG nova.compute.manager [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Not allocating networking since 'none' was specified. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 1283.823928] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4c770de218344c21b8c9d58ff8f73343 [ 1283.825019] env[62740]: DEBUG nova.compute.manager [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1283.826064] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg 014a98ea3d204c9dad70e70ee7f4184d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1283.852286] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 014a98ea3d204c9dad70e70ee7f4184d [ 1283.855022] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg 6afd0ed874b1413b8cfb4612a505f0a1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1283.882162] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6afd0ed874b1413b8cfb4612a505f0a1 [ 1283.883268] env[62740]: DEBUG nova.compute.manager [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Start spawning the instance on the hypervisor. {{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1283.907925] env[62740]: DEBUG nova.virt.hardware [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1283.907925] env[62740]: DEBUG nova.virt.hardware [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1283.908110] env[62740]: DEBUG nova.virt.hardware [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1283.908183] env[62740]: DEBUG nova.virt.hardware [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1283.908333] env[62740]: DEBUG nova.virt.hardware [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Image pref 0:0:0 {{(pid=62740) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1283.908552] env[62740]: DEBUG nova.virt.hardware [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1283.908778] env[62740]: DEBUG nova.virt.hardware [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1283.908939] env[62740]: DEBUG nova.virt.hardware [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1283.909128] env[62740]: DEBUG nova.virt.hardware [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1283.909296] env[62740]: DEBUG nova.virt.hardware [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1283.909472] env[62740]: DEBUG nova.virt.hardware [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1283.910318] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ce191f1-ce1e-4961-a272-6d8619f980a7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.917633] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da242906-a8f3-4436-a23c-f535329c5453 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.931611] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Instance VIF info [] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1283.937139] env[62740]: DEBUG oslo.service.loopingcall [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1283.937350] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1283.937543] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1a4d5fde-c6d7-4d71-9ed9-a02aefd5bb45 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.953357] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1283.953357] env[62740]: value = "task-640252" [ 1283.953357] env[62740]: _type = "Task" [ 1283.953357] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.960435] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640252, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.463235] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640252, 'name': CreateVM_Task, 'duration_secs': 0.234962} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.463532] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1284.463813] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1284.463974] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1284.464308] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1284.464557] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f15e5d0-7e4a-4e6d-9f0c-d47599a624c5 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.469080] env[62740]: DEBUG oslo_vmware.api [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Waiting for the task: (returnval){ [ 1284.469080] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52165452-a34b-f977-649a-d0f1731284e6" [ 1284.469080] env[62740]: _type = "Task" [ 1284.469080] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.476999] env[62740]: DEBUG oslo_vmware.api [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52165452-a34b-f977-649a-d0f1731284e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.979207] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1284.979469] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1284.979708] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1297.626148] env[62740]: DEBUG oslo_concurrency.lockutils [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Acquiring lock "61fea037-aac3-47ef-aa6a-5dfa657d840d" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1297.626487] env[62740]: DEBUG oslo_concurrency.lockutils [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Lock "61fea037-aac3-47ef-aa6a-5dfa657d840d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1310.890225] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1311.886408] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1311.890111] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62740) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1312.891335] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1312.891617] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1312.891736] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager.update_available_resource {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1312.892105] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg ba6efb4cfda640629f37b91fe0a9131b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1312.902578] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ba6efb4cfda640629f37b91fe0a9131b [ 1312.903731] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1312.903962] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1312.904143] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1312.904303] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62740) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1312.905527] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc3d93fd-70d3-4be5-b552-09f028a8281e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.914272] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6c94337-61f3-4b56-a0c6-50f4667c87ad {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.927897] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36805a9b-4b46-499e-aad1-36914d00dbc4 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.934028] env[62740]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-906197fd-147d-4112-a151-176ce2ddc1c7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.963227] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181627MB free_disk=90GB free_vcpus=48 pci_devices=None {{(pid=62740) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1312.963372] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1312.963563] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1312.964427] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg f12df5d10826421db891c3c62a199569 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1313.003696] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f12df5d10826421db891c3c62a199569 [ 1313.008863] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 934ef16913da4a65a1bfcd6abbe6dd25 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1313.018868] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 934ef16913da4a65a1bfcd6abbe6dd25 [ 1313.041160] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 472cd209-4192-4473-b788-d1ea342653bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1313.041314] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance d8dac9af-0897-4fbf-8ee6-1fb3955d48c0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1313.041440] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance d6c3ca16-5c7c-41e6-9850-10221603ad2a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1313.041564] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 158406db-7196-4826-aefa-20a58daa186b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1313.041685] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 56106517-e735-4bf5-8d5a-dc0d4aab3991 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1313.041803] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 43e4ddf4-230e-49f7-975f-ba99a6da9398 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1313.041918] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance eba85edb-4d86-42c9-8b49-98f2173a3eeb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1313.042045] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 7aacf4e0-b508-4a18-909a-3d1fe9458d98 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1313.042165] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 6005c9dc-3067-4719-a8f9-befb63f7cd8d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1313.042281] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance ba23ede2-be42-48ac-b281-571ccd158dee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1313.042806] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 0e5a873af45b4189b1b409493dd1629c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1313.052394] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0e5a873af45b4189b1b409493dd1629c [ 1313.053089] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1313.053557] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 480bd7fcb9934a42be9381ea6295e9f9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1313.062544] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 480bd7fcb9934a42be9381ea6295e9f9 [ 1313.063176] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 2deff09f-d24f-4609-91f2-1585e8407c2a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1313.063625] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 1af05f3f7bb54cbc93230a714ee51237 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1313.071876] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1af05f3f7bb54cbc93230a714ee51237 [ 1313.072510] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 3f36f081-2851-4339-860d-0a302ef4ee2c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1313.073281] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg c454369d04a2411e88777aced8833d2e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1313.081478] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c454369d04a2411e88777aced8833d2e [ 1313.082085] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 913ddb91-9d46-459e-8775-c9f380ed3cc4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1313.082519] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg b7bf2a1eccce4f7cae9d05731d29a3e0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1313.090771] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b7bf2a1eccce4f7cae9d05731d29a3e0 [ 1313.091357] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 07efd13e-40d0-4158-b17c-6f5c75474ce3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1313.091776] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg e0f715ec3e1e4007b0c02e9f2b73332d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1313.100794] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e0f715ec3e1e4007b0c02e9f2b73332d [ 1313.101382] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 8a156903-e4cf-43ed-9c6a-962a06ff9ef4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1313.101928] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 02dbbacb7cb940d8b8340f1b7e5064e4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1313.110094] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 02dbbacb7cb940d8b8340f1b7e5064e4 [ 1313.110664] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 388d71f2-b229-4666-a53d-d5b07e498eed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1313.111090] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 51233c29b9c045888657e6ab5057a92b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1313.119507] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 51233c29b9c045888657e6ab5057a92b [ 1313.120092] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 58ae8579-4ea3-45ba-a982-10e0ca82874c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1313.120503] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 567f410cb35b4e298af34f33e9cadd73 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1313.128415] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 567f410cb35b4e298af34f33e9cadd73 [ 1313.128866] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance c0daf074-eecb-4899-938f-477031efc6d1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1313.129285] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 29619c55271b458cb1f2e978f33411a1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1313.137969] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 29619c55271b458cb1f2e978f33411a1 [ 1313.138561] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 61fea037-aac3-47ef-aa6a-5dfa657d840d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1313.138802] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1313.138946] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1313.359325] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f71c475-31f0-409e-9f5a-e2f4b80377e9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.366788] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5938a3ad-ec08-4a3d-8f65-63777a117e31 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.396406] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-347fcbcd-86be-421e-9378-f459c2a4020c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.403398] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5d630c0-dbc1-443a-b9d8-4f3892090f9f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.415895] env[62740]: DEBUG nova.compute.provider_tree [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1313.416416] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 5dc0955038bf4078b1b5c707b40b8b4a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1313.423441] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5dc0955038bf4078b1b5c707b40b8b4a [ 1313.424313] env[62740]: DEBUG nova.scheduler.client.report [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed for provider 
d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1313.426634] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 8771edf49a4542889fd03fc554891d44 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1313.437376] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8771edf49a4542889fd03fc554891d44 [ 1313.438012] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62740) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1313.438201] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.475s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1314.437589] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1314.437857] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Starting heal instance info cache {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 1314.437890] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Rebuilding the list of instances to heal {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 1314.438538] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 71a917f49bf248da96796c5096ace27d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1314.455514] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 71a917f49bf248da96796c5096ace27d [ 1314.457801] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1314.457947] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1314.458207] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Skipping network cache update for instance because it is Building. 
{{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1314.458366] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 158406db-7196-4826-aefa-20a58daa186b] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1314.458592] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1314.458729] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1314.458852] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1314.458974] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1314.459105] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1314.459225] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1314.459343] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Didn't find any instances for network info cache update. 
{{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 1314.526434] env[62740]: WARNING oslo_vmware.rw_handles [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1314.526434] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1314.526434] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1314.526434] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1314.526434] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1314.526434] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 1314.526434] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1314.526434] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1314.526434] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1314.526434] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1314.526434] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1314.526434] env[62740]: ERROR oslo_vmware.rw_handles [ 1314.526798] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/29a34274-7c9e-428f-896e-75961cb931c4/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1314.529151] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1314.529405] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Copying Virtual Disk [datastore2] vmware_temp/29a34274-7c9e-428f-896e-75961cb931c4/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore2] vmware_temp/29a34274-7c9e-428f-896e-75961cb931c4/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1314.529654] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-80056575-92c8-4ff4-869e-2798b33f7e03 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.538985] env[62740]: DEBUG oslo_vmware.api [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Waiting for the task: 
(returnval){ [ 1314.538985] env[62740]: value = "task-640253" [ 1314.538985] env[62740]: _type = "Task" [ 1314.538985] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.546979] env[62740]: DEBUG oslo_vmware.api [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Task: {'id': task-640253, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.890926] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1314.891141] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62740) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 1315.049488] env[62740]: DEBUG oslo_vmware.exceptions [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Fault InvalidArgument not matched. {{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1315.049795] env[62740]: DEBUG oslo_concurrency.lockutils [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1315.050357] env[62740]: ERROR nova.compute.manager [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1315.050357] env[62740]: Faults: ['InvalidArgument'] [ 1315.050357] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Traceback (most recent call last): [ 1315.050357] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1315.050357] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] yield resources [ 1315.050357] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1315.050357] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] self.driver.spawn(context, instance, image_meta, [ 1315.050357] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1315.050357] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1315.050357] env[62740]: ERROR nova.compute.manager 
[instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1315.050357] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] self._fetch_image_if_missing(context, vi) [ 1315.050357] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1315.050664] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] image_cache(vi, tmp_image_ds_loc) [ 1315.050664] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1315.050664] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] vm_util.copy_virtual_disk( [ 1315.050664] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1315.050664] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] session._wait_for_task(vmdk_copy_task) [ 1315.050664] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1315.050664] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] return self.wait_for_task(task_ref) [ 1315.050664] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1315.050664] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] return evt.wait() [ 1315.050664] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1315.050664] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] result = hub.switch() [ 1315.050664] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1315.050664] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] return self.greenlet.switch() [ 1315.050968] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1315.050968] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] self.f(*self.args, **self.kw) [ 1315.050968] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1315.050968] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] raise exceptions.translate_fault(task_info.error) [ 1315.050968] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1315.050968] env[62740]: ERROR nova.compute.manager [instance: 
56106517-e735-4bf5-8d5a-dc0d4aab3991] Faults: ['InvalidArgument'] [ 1315.050968] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] [ 1315.050968] env[62740]: INFO nova.compute.manager [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Terminating instance [ 1315.053343] env[62740]: DEBUG nova.compute.manager [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1315.053545] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1315.053827] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1315.054032] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1315.054739] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-799f64f0-ff5d-4b2e-8f3d-de3022e5644c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.057255] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fb42a5dd-53e9-493e-bbe1-ac4c24f28f7c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.063182] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1315.063388] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2f7a05ce-791d-4a99-8c39-6d1334dc12f9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.065370] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1315.065544] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None 
req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1315.066457] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49a11393-751d-4df6-ae57-b560bb8294f9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.070713] env[62740]: DEBUG oslo_vmware.api [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Waiting for the task: (returnval){ [ 1315.070713] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5238bb7e-417f-5c48-3a2b-bfcfdae41f20" [ 1315.070713] env[62740]: _type = "Task" [ 1315.070713] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.079983] env[62740]: DEBUG oslo_vmware.api [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5238bb7e-417f-5c48-3a2b-bfcfdae41f20, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.128835] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1315.129050] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1315.129301] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Deleting the datastore file [datastore2] 56106517-e735-4bf5-8d5a-dc0d4aab3991 {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1315.129557] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-85d5615f-cb9c-4ea8-b544-82cbdb85cdf6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.135320] env[62740]: DEBUG oslo_vmware.api [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Waiting for the task: (returnval){ [ 1315.135320] env[62740]: value = "task-640255" [ 1315.135320] env[62740]: _type = "Task" [ 1315.135320] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.142909] env[62740]: DEBUG oslo_vmware.api [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Task: {'id': task-640255, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.582347] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1315.582699] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Creating directory with path [datastore2] vmware_temp/727c0b8a-a9a0-4d48-baa0-5181d74a8185/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1315.582829] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-322c09a3-d5a6-4df1-bc71-4a453c03a887 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.593597] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Created directory with path [datastore2] vmware_temp/727c0b8a-a9a0-4d48-baa0-5181d74a8185/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1315.593779] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Fetch image to [datastore2] vmware_temp/727c0b8a-a9a0-4d48-baa0-5181d74a8185/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1315.593947] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/727c0b8a-a9a0-4d48-baa0-5181d74a8185/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1315.594643] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05794402-ba09-43ed-ade3-a7889eae2069 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.600959] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e340cbfc-4a03-4e20-8ebb-f9010e32ac9a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.609550] env[62740]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-474ea56c-b950-49ab-807e-47cffeacad4f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.641427] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b195147-8177-454c-b257-efd4d26f02df {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.648091] env[62740]: DEBUG oslo_vmware.api [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Task: {'id': task-640255, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07231} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.649520] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1315.649706] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1315.649979] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1315.650069] env[62740]: INFO nova.compute.manager [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Took 0.60 seconds to destroy the instance on the hypervisor. 
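Note on the task-polling pattern visible in the records above: oslo.vmware creates a server-side task (CopyVirtualDisk_Task, DeleteDatastoreFile_Task), then wait_for_task/_poll_task repeatedly reads the task state, logging progress ("progress is 0%") until the task completes ("completed successfully", duration_secs 0.07231) or raises a translated fault, as happened with the InvalidArgument failure. The sketch below re-implements that loop in miniature; TaskInfo, TaskFaultError, and get_task_info are illustrative stand-ins, not oslo.vmware's API (the real code polls inside an eventlet loopingcall and uses exceptions.translate_fault()).

```python
# Minimal sketch of the wait_for_task/_poll_task pattern in these records.
# NOT oslo.vmware's implementation; get_task_info is a hypothetical callable
# standing in for a PropertyCollector read of the task's "info" property.
import time
from dataclasses import dataclass
from typing import Callable, Optional


@dataclass
class TaskInfo:
    state: str                 # 'queued' | 'running' | 'success' | 'error'
    progress: int = 0          # percent complete, as logged by _poll_task
    error: Optional[str] = None


class TaskFaultError(RuntimeError):
    """Stands in for oslo_vmware.exceptions.VimFaultException."""


def wait_for_task(get_task_info: Callable[[], TaskInfo],
                  poll_interval: float = 0.5,
                  timeout: float = 300.0) -> TaskInfo:
    """Poll a task until it succeeds, raising on error, as the log shows."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == 'success':
            return info                      # "completed successfully"
        if info.state == 'error':
            # Mirrors: raise exceptions.translate_fault(task_info.error)
            raise TaskFaultError(info.error)
        # Mirrors the "progress is 0%" DEBUG lines while queued/running.
        print(f"progress is {info.progress}%")
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete in time")


# Toy usage: a task that errors the way CopyVirtualDisk_Task did above.
_states = iter([TaskInfo('running'),
                TaskInfo('error', error="A specified parameter was not "
                                        "correct: fileType")])
try:
    wait_for_task(lambda: next(_states), poll_interval=0.01)
except TaskFaultError as exc:
    print("task failed:", exc)
```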
[ 1315.651802] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7ed0709b-7430-4452-96f4-dbb0559281cd {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.653629] env[62740]: DEBUG nova.compute.claims [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1315.653799] env[62740]: DEBUG oslo_concurrency.lockutils [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1315.654018] env[62740]: DEBUG oslo_concurrency.lockutils [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1315.655872] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 97653ebc9aa447f6b7260523ddafd47a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1315.677298] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1315.696888] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 97653ebc9aa447f6b7260523ddafd47a [ 1315.736277] env[62740]: DEBUG oslo_vmware.rw_handles [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/727c0b8a-a9a0-4d48-baa0-5181d74a8185/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1315.799862] env[62740]: DEBUG oslo_vmware.rw_handles [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Completed reading data from the image iterator. 
{{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1315.800068] env[62740]: DEBUG oslo_vmware.rw_handles [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/727c0b8a-a9a0-4d48-baa0-5181d74a8185/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1315.890713] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1315.993996] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f9eb699-5acb-4f86-ba37-a5f99f78a1f7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.000615] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1798a76-d570-4fe6-9941-a736fe39fb90 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.030881] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d81c3fb-4c66-4435-9163-cd088b95c4ce {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.038265] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00f8ecb7-7569-413a-bf9d-63f07b442b67 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.051717] env[62740]: DEBUG nova.compute.provider_tree [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1316.052139] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg fecf9831ae8c4dc8b705879746f2ce5b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1316.060327] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fecf9831ae8c4dc8b705879746f2ce5b [ 1316.060921] env[62740]: DEBUG nova.scheduler.client.report [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1316.063543] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 643c9112844b4e08813a12ef2557f6d8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1316.074729] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 643c9112844b4e08813a12ef2557f6d8 [ 1316.075566] env[62740]: DEBUG oslo_concurrency.lockutils [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.421s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1316.076093] env[62740]: ERROR nova.compute.manager [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1316.076093] env[62740]: Faults: ['InvalidArgument'] [ 1316.076093] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Traceback (most recent call last): [ 1316.076093] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1316.076093] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] self.driver.spawn(context, instance, image_meta, [ 1316.076093] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1316.076093] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1316.076093] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1316.076093] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] self._fetch_image_if_missing(context, vi) [ 1316.076093] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1316.076093] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] image_cache(vi, tmp_image_ds_loc) [ 1316.076093] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1316.076396] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] vm_util.copy_virtual_disk( [ 1316.076396] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1316.076396] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] session._wait_for_task(vmdk_copy_task) [ 1316.076396] env[62740]: ERROR nova.compute.manager [instance: 
56106517-e735-4bf5-8d5a-dc0d4aab3991] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1316.076396] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] return self.wait_for_task(task_ref) [ 1316.076396] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1316.076396] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] return evt.wait() [ 1316.076396] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1316.076396] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] result = hub.switch() [ 1316.076396] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1316.076396] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] return self.greenlet.switch() [ 1316.076396] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1316.076396] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] self.f(*self.args, **self.kw) [ 1316.076679] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1316.076679] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] raise exceptions.translate_fault(task_info.error) [ 1316.076679] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1316.076679] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Faults: ['InvalidArgument'] [ 1316.076679] env[62740]: ERROR nova.compute.manager [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] [ 1316.076823] env[62740]: DEBUG nova.compute.utils [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1316.078317] env[62740]: DEBUG nova.compute.manager [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Build of instance 56106517-e735-4bf5-8d5a-dc0d4aab3991 was re-scheduled: A specified parameter was not correct: fileType [ 1316.078317] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1316.078808] env[62740]: DEBUG nova.compute.manager [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Unplugging VIFs for instance {{(pid=62740) 
_cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1316.078965] env[62740]: DEBUG nova.compute.manager [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1316.079155] env[62740]: DEBUG nova.compute.manager [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1316.079319] env[62740]: DEBUG nova.network.neutron [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1316.365144] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 05bd7eaf95a044289fd7e5b1c87b6695 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1316.376791] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 05bd7eaf95a044289fd7e5b1c87b6695 [ 1316.377608] env[62740]: DEBUG nova.network.neutron [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1316.378215] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 35f9549e390b454996bf002ca16ef523 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1316.389532] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 35f9549e390b454996bf002ca16ef523 [ 1316.390429] env[62740]: INFO nova.compute.manager [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Took 0.31 seconds to deallocate network for instance. 
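The records above trace Nova's failure path end to end: driver.spawn raises VimFaultException, the manager logs "Failed to build and run instance", marks the build re-scheduled, skips VIF unplugging because the driver does not implement it, and deallocates the network. Below is a condensed sketch of that control flow; MiniComputeManager and its collaborators are hypothetical stand-ins, not Nova classes.

```python
# Condensed sketch of the build/re-schedule flow these records trace:
# _build_and_run_instance fails in driver.spawn, networks are cleaned up,
# and the request is handed back to the scheduler. Names are illustrative.
import logging

LOG = logging.getLogger(__name__)


class MiniComputeManager:
    def __init__(self, driver, network_api, scheduler):
        self.driver = driver
        self.network_api = network_api
        self.scheduler = scheduler

    def build_and_run_instance(self, context, instance):
        try:
            self.driver.spawn(context, instance)
        except Exception as exc:
            # "Failed to build and run instance: ..." in the log above.
            LOG.error("Failed to build and run instance: %s", exc)
            self._cleanup_allocated_networks(context, instance)
            # "Build of instance ... was re-scheduled": hand the request
            # back so another (or the same) host can retry.
            self.scheduler.reschedule(context, instance, reason=str(exc))

    def _cleanup_allocated_networks(self, context, instance):
        # "Virt driver does not provide unplug_vifs method, so it is not
        # possible to determine if VIFs should be unplugged."
        if hasattr(self.driver, 'unplug_vifs'):
            self.driver.unplug_vifs(instance)
        else:
            LOG.debug("Virt driver does not provide unplug_vifs method")
        # "Deallocating network for instance" / deallocate_for_instance()
        self.network_api.deallocate_for_instance(context, instance)
```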
[ 1316.391600] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg ebd227614faf4374aa30f8dc99b5363b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1316.426843] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ebd227614faf4374aa30f8dc99b5363b [ 1316.429550] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 61bd2b8017804251b5c4624218e39f91 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1316.458768] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 61bd2b8017804251b5c4624218e39f91 [ 1316.486715] env[62740]: INFO nova.scheduler.client.report [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Deleted allocations for instance 56106517-e735-4bf5-8d5a-dc0d4aab3991 [ 1316.492917] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 05b7097e5ff545ca9f2358d083011094 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1316.514026] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 05b7097e5ff545ca9f2358d083011094 [ 1316.514656] env[62740]: DEBUG oslo_concurrency.lockutils [None req-505ae29c-65a4-4292-be5d-42e30173a18b tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "56106517-e735-4bf5-8d5a-dc0d4aab3991" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 314.762s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1316.515218] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Expecting reply to msg 368e045354ff4422b38afe7918d03364 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1316.515971] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "56106517-e735-4bf5-8d5a-dc0d4aab3991" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 134.256s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1316.518254] env[62740]: INFO nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] During sync_power_state the instance has a pending task (spawning). Skip. 
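The lockutils records here report both how long a caller waited for a named lock and how long it held it ("waited 134.256s", "held 314.762s" for the long-running build lock). The context manager below is a hand-rolled reproduction of that instrumentation for illustration only; the real implementation is oslo_concurrency.lockutils, which also supports external file locks and fair locking.

```python
# Small sketch of the timing instrumentation lockutils emits in these
# records ('acquired ... waited Xs', '"released" ... held Ys').
import threading
import time
from contextlib import contextmanager

_locks: dict[str, threading.Lock] = {}
_registry_guard = threading.Lock()


def _get_lock(name: str) -> threading.Lock:
    # One shared Lock object per lock name, as with lockutils' named locks.
    with _registry_guard:
        return _locks.setdefault(name, threading.Lock())


@contextmanager
def timed_lock(name: str, owner: str):
    lock = _get_lock(name)
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t1
        print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')


# Usage, mirroring the "compute_resources" critical sections in the log:
with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
    time.sleep(0.01)  # stand-in for claim work
```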
[ 1316.518254] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "56106517-e735-4bf5-8d5a-dc0d4aab3991" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1316.518254] env[62740]: DEBUG oslo_concurrency.lockutils [None req-b8f389c4-01b7-4a36-b18b-6706bd672fd2 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "56106517-e735-4bf5-8d5a-dc0d4aab3991" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 119.140s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1316.518254] env[62740]: DEBUG oslo_concurrency.lockutils [None req-b8f389c4-01b7-4a36-b18b-6706bd672fd2 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquiring lock "56106517-e735-4bf5-8d5a-dc0d4aab3991-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1316.518598] env[62740]: DEBUG oslo_concurrency.lockutils [None req-b8f389c4-01b7-4a36-b18b-6706bd672fd2 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "56106517-e735-4bf5-8d5a-dc0d4aab3991-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1316.518598] env[62740]: DEBUG oslo_concurrency.lockutils [None req-b8f389c4-01b7-4a36-b18b-6706bd672fd2 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "56106517-e735-4bf5-8d5a-dc0d4aab3991-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1316.519462] env[62740]: INFO nova.compute.manager [None req-b8f389c4-01b7-4a36-b18b-6706bd672fd2 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Terminating instance [ 1316.521222] env[62740]: DEBUG nova.compute.manager [None req-b8f389c4-01b7-4a36-b18b-6706bd672fd2 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Start destroying the instance on the hypervisor. 
{{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1316.521420] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-b8f389c4-01b7-4a36-b18b-6706bd672fd2 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1316.521706] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dc01a915-319e-4521-9da4-0f6a6db88a8d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.531606] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3a4aefd-d176-4895-998c-2861281eb6f3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.542517] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 368e045354ff4422b38afe7918d03364 [ 1316.543035] env[62740]: DEBUG nova.compute.manager [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1316.544768] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Expecting reply to msg 6e970ee7292140d99392e78f5513bb63 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1316.564943] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-b8f389c4-01b7-4a36-b18b-6706bd672fd2 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 56106517-e735-4bf5-8d5a-dc0d4aab3991 could not be found. [ 1316.565131] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-b8f389c4-01b7-4a36-b18b-6706bd672fd2 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1316.565305] env[62740]: INFO nova.compute.manager [None req-b8f389c4-01b7-4a36-b18b-6706bd672fd2 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1316.565538] env[62740]: DEBUG oslo.service.loopingcall [None req-b8f389c4-01b7-4a36-b18b-6706bd672fd2 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1316.565754] env[62740]: DEBUG nova.compute.manager [-] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1316.565842] env[62740]: DEBUG nova.network.neutron [-] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1316.577112] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6e970ee7292140d99392e78f5513bb63 [ 1316.587578] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ce4072d8b6c6441b940aa92c38b03db4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1316.591442] env[62740]: DEBUG oslo_concurrency.lockutils [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1316.591766] env[62740]: DEBUG oslo_concurrency.lockutils [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1316.593245] env[62740]: INFO nova.compute.claims [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1316.594884] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Expecting reply to msg 5f0482686a674251a45bda796609b373 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1316.596052] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ce4072d8b6c6441b940aa92c38b03db4 [ 1316.596476] env[62740]: DEBUG nova.network.neutron [-] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1316.596795] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 17ccf31b0d7e4041b09918cbe8fba7df in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1316.604175] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 17ccf31b0d7e4041b09918cbe8fba7df [ 1316.604586] env[62740]: INFO nova.compute.manager [-] [instance: 56106517-e735-4bf5-8d5a-dc0d4aab3991] Took 0.04 seconds to deallocate network for instance. 
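The claim records show both outcomes of a resource claim taken under the compute_resources lock: "Claim successful on node ..." for instance 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7, and, for the failed build earlier, "Aborting claim" followed by abort_instance_claim returning the claimed resources. A minimal sketch of those semantics as a context manager; the class and field names are invented, and the free-resource numbers come from the resource view logged above (48 vcpus total, 10 allocated).

```python
# Sketch of the claim lifecycle in these records: claim on entry, abort
# (roll back) if the build fails. Illustrative only, not Nova's claims.py.
from dataclasses import dataclass


class ComputeResourcesUnavailable(Exception):
    pass


@dataclass
class NodeResources:
    free_vcpus: int
    free_ram_mb: int


class Claim:
    """Context manager: keeps the claim on success, aborts on exception."""

    def __init__(self, node: NodeResources, vcpus: int, ram_mb: int):
        if vcpus > node.free_vcpus or ram_mb > node.free_ram_mb:
            raise ComputeResourcesUnavailable("not enough free resources")
        self.node, self.vcpus, self.ram_mb = node, vcpus, ram_mb
        node.free_vcpus -= vcpus            # "Claim successful on node ..."
        node.free_ram_mb -= ram_mb

    def abort(self):
        # "Aborting claim": give the resources back to the node.
        self.node.free_vcpus += self.vcpus
        self.node.free_ram_mb += self.ram_mb

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc, tb):
        if exc_type is not None:
            self.abort()
        return False                        # re-raise the build failure


node = NodeResources(free_vcpus=48 - 10, free_ram_mb=196590 - 1792)
try:
    with Claim(node, vcpus=1, ram_mb=128):
        raise RuntimeError("spawn failed")  # e.g. the InvalidArgument fault
except RuntimeError:
    assert node.free_vcpus == 38            # claim was rolled back
```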
[ 1316.607881] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-b8f389c4-01b7-4a36-b18b-6706bd672fd2 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg d35d1f8667b544bc879f4c66037a2133 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1316.634949] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5f0482686a674251a45bda796609b373 [ 1316.636652] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Expecting reply to msg 21b4d47022a7426185b90bd36f205d35 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1316.643067] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 21b4d47022a7426185b90bd36f205d35 [ 1316.646678] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d35d1f8667b544bc879f4c66037a2133 [ 1316.659340] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-b8f389c4-01b7-4a36-b18b-6706bd672fd2 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg ec17f36f57ef475b9b073bd361ab7c0a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1316.699037] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ec17f36f57ef475b9b073bd361ab7c0a [ 1316.701746] env[62740]: DEBUG oslo_concurrency.lockutils [None req-b8f389c4-01b7-4a36-b18b-6706bd672fd2 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "56106517-e735-4bf5-8d5a-dc0d4aab3991" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.185s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1316.702090] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-b8f389c4-01b7-4a36-b18b-6706bd672fd2 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 3f5a8f644fe7446696357eeb10192e55 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1316.712964] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f5a8f644fe7446696357eeb10192e55 [ 1316.864106] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97a6e91f-d195-4738-ab0b-34c0e675463a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.871845] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2427330b-8bdf-423f-b484-7e4b3a3b6300 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.900505] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68552a8e-2053-4f73-abde-26b361310611 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.907535] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1e2a41a-7e58-4d6a-9e16-78860253f5cf {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.920663] env[62740]: DEBUG nova.compute.provider_tree 
[None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1316.921229] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Expecting reply to msg 4bab09794c9d497b8662fa32423911db in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1316.929794] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4bab09794c9d497b8662fa32423911db [ 1316.930773] env[62740]: DEBUG nova.scheduler.client.report [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1316.933240] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Expecting reply to msg 77819b107874454998f409e073f4cc9f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1316.944074] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 77819b107874454998f409e073f4cc9f [ 1316.944785] env[62740]: DEBUG oslo_concurrency.lockutils [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.353s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1316.945273] env[62740]: DEBUG nova.compute.manager [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Start building networks asynchronously for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1316.946881] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Expecting reply to msg de910caccb3c4eacbe41835d1f22a257 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1316.977980] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg de910caccb3c4eacbe41835d1f22a257 [ 1316.980547] env[62740]: DEBUG nova.compute.utils [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1316.981158] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Expecting reply to msg 39f3b0b6dba845ffb9afa37e38c3363a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1316.982159] env[62740]: DEBUG nova.compute.manager [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1316.982318] env[62740]: DEBUG nova.network.neutron [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1316.992021] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 39f3b0b6dba845ffb9afa37e38c3363a [ 1316.992559] env[62740]: DEBUG nova.compute.manager [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1316.994251] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Expecting reply to msg 9a42751723834b0281077574df9582ba in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1317.024636] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9a42751723834b0281077574df9582ba [ 1317.027390] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Expecting reply to msg cecc2f83f4e54736994ac1bd51f45fcb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1317.056759] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cecc2f83f4e54736994ac1bd51f45fcb [ 1317.058059] env[62740]: DEBUG nova.compute.manager [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Start spawning the instance on the hypervisor. {{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1317.063392] env[62740]: DEBUG nova.policy [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b0abe8c79c054df7aa562b0f506ce67b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '99d898b3fc31404b8a4667171ef22010', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 1317.079086] env[62740]: DEBUG nova.virt.hardware [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1317.079321] env[62740]: DEBUG nova.virt.hardware [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1317.079559] env[62740]: DEBUG nova.virt.hardware [None 
req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1317.079781] env[62740]: DEBUG nova.virt.hardware [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1317.079937] env[62740]: DEBUG nova.virt.hardware [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1317.080175] env[62740]: DEBUG nova.virt.hardware [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1317.080459] env[62740]: DEBUG nova.virt.hardware [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1317.080654] env[62740]: DEBUG nova.virt.hardware [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1317.080901] env[62740]: DEBUG nova.virt.hardware [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1317.081056] env[62740]: DEBUG nova.virt.hardware [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1317.081248] env[62740]: DEBUG nova.virt.hardware [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1317.082340] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9bee164-62b3-4999-9191-e3a224f6ac3e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.090402] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14f71221-a3c7-4d63-9b7f-893d76853f9f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.599400] env[62740]: 
DEBUG nova.network.neutron [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Successfully created port: fc828731-552d-42f8-9a31-c67e83f00784 {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1318.339583] env[62740]: DEBUG oslo_concurrency.lockutils [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquiring lock "86c57375-8328-4344-b228-2f1ce6efc71e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1318.339827] env[62740]: DEBUG oslo_concurrency.lockutils [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "86c57375-8328-4344-b228-2f1ce6efc71e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1318.406184] env[62740]: DEBUG nova.network.neutron [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Successfully updated port: fc828731-552d-42f8-9a31-c67e83f00784 {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1318.406184] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Expecting reply to msg f5c647be6d12482c84a750a94fc64d58 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1318.414217] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f5c647be6d12482c84a750a94fc64d58 [ 1318.415021] env[62740]: DEBUG oslo_concurrency.lockutils [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Acquiring lock "refresh_cache-1ff3f5e9-284a-49b2-ad55-2c42f9b051c7" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1318.415021] env[62740]: DEBUG oslo_concurrency.lockutils [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Acquired lock "refresh_cache-1ff3f5e9-284a-49b2-ad55-2c42f9b051c7" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1318.415224] env[62740]: DEBUG nova.network.neutron [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1318.415554] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Expecting reply to msg 9f25c81c53934984aa5a1d2d46cd3380 in queue 
reply_30cb6e3d754a4ebf9cedab7950709402 [ 1318.425908] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f25c81c53934984aa5a1d2d46cd3380 [ 1318.460067] env[62740]: DEBUG nova.compute.manager [req-7ff27338-16af-473d-bfaa-1912370182a0 req-29a875a0-3948-4e15-b123-8d7362c934dc service nova] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Received event network-vif-plugged-fc828731-552d-42f8-9a31-c67e83f00784 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1318.460252] env[62740]: DEBUG oslo_concurrency.lockutils [req-7ff27338-16af-473d-bfaa-1912370182a0 req-29a875a0-3948-4e15-b123-8d7362c934dc service nova] Acquiring lock "1ff3f5e9-284a-49b2-ad55-2c42f9b051c7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1318.460458] env[62740]: DEBUG oslo_concurrency.lockutils [req-7ff27338-16af-473d-bfaa-1912370182a0 req-29a875a0-3948-4e15-b123-8d7362c934dc service nova] Lock "1ff3f5e9-284a-49b2-ad55-2c42f9b051c7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1318.460628] env[62740]: DEBUG oslo_concurrency.lockutils [req-7ff27338-16af-473d-bfaa-1912370182a0 req-29a875a0-3948-4e15-b123-8d7362c934dc service nova] Lock "1ff3f5e9-284a-49b2-ad55-2c42f9b051c7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1318.460796] env[62740]: DEBUG nova.compute.manager [req-7ff27338-16af-473d-bfaa-1912370182a0 req-29a875a0-3948-4e15-b123-8d7362c934dc service nova] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] No waiting events found dispatching network-vif-plugged-fc828731-552d-42f8-9a31-c67e83f00784 {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1318.460959] env[62740]: WARNING nova.compute.manager [req-7ff27338-16af-473d-bfaa-1912370182a0 req-29a875a0-3948-4e15-b123-8d7362c934dc service nova] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Received unexpected event network-vif-plugged-fc828731-552d-42f8-9a31-c67e83f00784 for instance with vm_state building and task_state spawning. [ 1318.487387] env[62740]: DEBUG nova.network.neutron [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1318.674664] env[62740]: DEBUG nova.network.neutron [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Updating instance_info_cache with network_info: [{"id": "fc828731-552d-42f8-9a31-c67e83f00784", "address": "fa:16:3e:87:04:c6", "network": {"id": "86cfcbff-30e2-43b7-86cf-e6e1ee02ec8c", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1967981449-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "99d898b3fc31404b8a4667171ef22010", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1da5fc2-0280-4f76-ac97-20ea4bc7bb16", "external-id": "nsx-vlan-transportzone-563", "segmentation_id": 563, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc828731-55", "ovs_interfaceid": "fc828731-552d-42f8-9a31-c67e83f00784", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1318.675244] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Expecting reply to msg 2d63e56b310d4ac19c6a25907e61d9df in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1318.685512] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2d63e56b310d4ac19c6a25907e61d9df [ 1318.686142] env[62740]: DEBUG oslo_concurrency.lockutils [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Releasing lock "refresh_cache-1ff3f5e9-284a-49b2-ad55-2c42f9b051c7" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1318.686538] env[62740]: DEBUG nova.compute.manager [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Instance network_info: |[{"id": "fc828731-552d-42f8-9a31-c67e83f00784", "address": "fa:16:3e:87:04:c6", "network": {"id": "86cfcbff-30e2-43b7-86cf-e6e1ee02ec8c", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1967981449-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "99d898b3fc31404b8a4667171ef22010", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"d1da5fc2-0280-4f76-ac97-20ea4bc7bb16", "external-id": "nsx-vlan-transportzone-563", "segmentation_id": 563, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc828731-55", "ovs_interfaceid": "fc828731-552d-42f8-9a31-c67e83f00784", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1318.686982] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:87:04:c6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd1da5fc2-0280-4f76-ac97-20ea4bc7bb16', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fc828731-552d-42f8-9a31-c67e83f00784', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1318.694952] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Creating folder: Project (99d898b3fc31404b8a4667171ef22010). Parent ref: group-v156037. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1318.695469] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-400aac95-e00e-434b-867c-e389f0bfde01 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.706958] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Created folder: Project (99d898b3fc31404b8a4667171ef22010) in parent group-v156037. [ 1318.707157] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Creating folder: Instances. Parent ref: group-v156149. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1318.707377] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e1eaec3a-3b20-4fea-abfc-96c3a762a46b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.716265] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Created folder: Instances in parent group-v156149. [ 1318.716491] env[62740]: DEBUG oslo.service.loopingcall [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1318.716670] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1318.716862] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-157f0e0b-427c-4b9d-a636-5eab8a349843 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.735170] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1318.735170] env[62740]: value = "task-640258" [ 1318.735170] env[62740]: _type = "Task" [ 1318.735170] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.742519] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640258, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.244595] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640258, 'name': CreateVM_Task, 'duration_secs': 0.353529} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.244854] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1319.245530] env[62740]: DEBUG oslo_concurrency.lockutils [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1319.245704] env[62740]: DEBUG oslo_concurrency.lockutils [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1319.246076] env[62740]: DEBUG oslo_concurrency.lockutils [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1319.246341] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5f78e45-ac34-4599-9d87-693cb2dcc32d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.250824] env[62740]: DEBUG oslo_vmware.api [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Waiting for the task: (returnval){ [ 1319.250824] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5282ac78-611c-fd79-1dea-cc4d8f4b2f77" [ 1319.250824] env[62740]: _type = "Task" [ 1319.250824] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.259265] env[62740]: DEBUG oslo_vmware.api [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5282ac78-611c-fd79-1dea-cc4d8f4b2f77, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.761342] env[62740]: DEBUG oslo_concurrency.lockutils [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1319.761603] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1319.761810] env[62740]: DEBUG oslo_concurrency.lockutils [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1320.519528] env[62740]: DEBUG nova.compute.manager [req-f99cf08f-8323-45fe-9596-f94773382105 req-535e8aca-c7cf-422a-80b5-3728ec0ef3a5 service nova] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Received event network-changed-fc828731-552d-42f8-9a31-c67e83f00784 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1320.519657] env[62740]: DEBUG nova.compute.manager [req-f99cf08f-8323-45fe-9596-f94773382105 req-535e8aca-c7cf-422a-80b5-3728ec0ef3a5 service nova] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Refreshing instance network info cache due to event network-changed-fc828731-552d-42f8-9a31-c67e83f00784. 
{{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1320.519871] env[62740]: DEBUG oslo_concurrency.lockutils [req-f99cf08f-8323-45fe-9596-f94773382105 req-535e8aca-c7cf-422a-80b5-3728ec0ef3a5 service nova] Acquiring lock "refresh_cache-1ff3f5e9-284a-49b2-ad55-2c42f9b051c7" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1320.524405] env[62740]: DEBUG oslo_concurrency.lockutils [req-f99cf08f-8323-45fe-9596-f94773382105 req-535e8aca-c7cf-422a-80b5-3728ec0ef3a5 service nova] Acquired lock "refresh_cache-1ff3f5e9-284a-49b2-ad55-2c42f9b051c7" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1320.524405] env[62740]: DEBUG nova.network.neutron [req-f99cf08f-8323-45fe-9596-f94773382105 req-535e8aca-c7cf-422a-80b5-3728ec0ef3a5 service nova] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Refreshing network info cache for port fc828731-552d-42f8-9a31-c67e83f00784 {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1320.524405] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-f99cf08f-8323-45fe-9596-f94773382105 req-535e8aca-c7cf-422a-80b5-3728ec0ef3a5 service nova] Expecting reply to msg b856215fbcc34a128a2feae3c6f3df86 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1320.531895] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b856215fbcc34a128a2feae3c6f3df86 [ 1321.054907] env[62740]: DEBUG nova.network.neutron [req-f99cf08f-8323-45fe-9596-f94773382105 req-535e8aca-c7cf-422a-80b5-3728ec0ef3a5 service nova] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Updated VIF entry in instance network info cache for port fc828731-552d-42f8-9a31-c67e83f00784. 
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1321.055297] env[62740]: DEBUG nova.network.neutron [req-f99cf08f-8323-45fe-9596-f94773382105 req-535e8aca-c7cf-422a-80b5-3728ec0ef3a5 service nova] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Updating instance_info_cache with network_info: [{"id": "fc828731-552d-42f8-9a31-c67e83f00784", "address": "fa:16:3e:87:04:c6", "network": {"id": "86cfcbff-30e2-43b7-86cf-e6e1ee02ec8c", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1967981449-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "99d898b3fc31404b8a4667171ef22010", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1da5fc2-0280-4f76-ac97-20ea4bc7bb16", "external-id": "nsx-vlan-transportzone-563", "segmentation_id": 563, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc828731-55", "ovs_interfaceid": "fc828731-552d-42f8-9a31-c67e83f00784", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1321.055804] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-f99cf08f-8323-45fe-9596-f94773382105 req-535e8aca-c7cf-422a-80b5-3728ec0ef3a5 service nova] Expecting reply to msg 11155eb49d4a42a39ba1f91476accf21 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1321.063829] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 11155eb49d4a42a39ba1f91476accf21 [ 1321.064430] env[62740]: DEBUG oslo_concurrency.lockutils [req-f99cf08f-8323-45fe-9596-f94773382105 req-535e8aca-c7cf-422a-80b5-3728ec0ef3a5 service nova] Releasing lock "refresh_cache-1ff3f5e9-284a-49b2-ad55-2c42f9b051c7" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1325.569015] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-17e10e78-752c-4810-aaf7-a363cb7ae39a tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg a10da7027fdd40d8b040c6489b4750da in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1325.578917] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a10da7027fdd40d8b040c6489b4750da [ 1325.579406] env[62740]: DEBUG oslo_concurrency.lockutils [None req-17e10e78-752c-4810-aaf7-a363cb7ae39a tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Acquiring lock "ba23ede2-be42-48ac-b281-571ccd158dee" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1329.531710] env[62740]: WARNING oslo_vmware.rw_handles [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without 
response [ 1329.531710] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1329.531710] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1329.531710] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1329.531710] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1329.531710] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 1329.531710] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1329.531710] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1329.531710] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1329.531710] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1329.531710] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1329.531710] env[62740]: ERROR oslo_vmware.rw_handles [ 1329.532428] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/1eb52734-d408-4866-b4e8-a91ca0e75903/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore1 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1329.534355] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1329.534615] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Copying Virtual Disk [datastore1] vmware_temp/1eb52734-d408-4866-b4e8-a91ca0e75903/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore1] vmware_temp/1eb52734-d408-4866-b4e8-a91ca0e75903/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1329.534942] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0ea1184d-dd31-4723-a29a-4ea1f0c6686f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.542990] env[62740]: DEBUG oslo_vmware.api [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Waiting for the task: (returnval){ [ 1329.542990] env[62740]: value = "task-640259" [ 1329.542990] env[62740]: _type = "Task" [ 1329.542990] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.551057] env[62740]: DEBUG oslo_vmware.api [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Task: {'id': task-640259, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.823374] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-07d2a2e6-0e56-427a-89d3-3075eb6935c1 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Expecting reply to msg 7c606c009c2c402292f8643738f1714c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1329.832923] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c606c009c2c402292f8643738f1714c [ 1329.833417] env[62740]: DEBUG oslo_concurrency.lockutils [None req-07d2a2e6-0e56-427a-89d3-3075eb6935c1 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Acquiring lock "1ff3f5e9-284a-49b2-ad55-2c42f9b051c7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1330.053181] env[62740]: DEBUG oslo_vmware.exceptions [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Fault InvalidArgument not matched. {{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1330.053514] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Releasing lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1330.054080] env[62740]: ERROR nova.compute.manager [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1330.054080] env[62740]: Faults: ['InvalidArgument'] [ 1330.054080] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] Traceback (most recent call last): [ 1330.054080] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1330.054080] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] yield resources [ 1330.054080] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1330.054080] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] self.driver.spawn(context, instance, image_meta, [ 1330.054080] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1330.054080] env[62740]: ERROR 
nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1330.054080] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1330.054080] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] self._fetch_image_if_missing(context, vi) [ 1330.054080] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1330.054384] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] image_cache(vi, tmp_image_ds_loc) [ 1330.054384] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1330.054384] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] vm_util.copy_virtual_disk( [ 1330.054384] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1330.054384] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] session._wait_for_task(vmdk_copy_task) [ 1330.054384] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1330.054384] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] return self.wait_for_task(task_ref) [ 1330.054384] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1330.054384] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] return evt.wait() [ 1330.054384] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1330.054384] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] result = hub.switch() [ 1330.054384] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1330.054384] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] return self.greenlet.switch() [ 1330.054675] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1330.054675] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] self.f(*self.args, **self.kw) [ 1330.054675] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1330.054675] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] raise exceptions.translate_fault(task_info.error) [ 1330.054675] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] 
oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1330.054675] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] Faults: ['InvalidArgument'] [ 1330.054675] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] [ 1330.054675] env[62740]: INFO nova.compute.manager [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Terminating instance [ 1330.056025] env[62740]: DEBUG oslo_concurrency.lockutils [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquired lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1330.056263] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1330.056518] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-beff10ed-063d-4015-8f04-e1ed46ec2d2a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.058970] env[62740]: DEBUG nova.compute.manager [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Start destroying the instance on the hypervisor. 
{{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1330.059181] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1330.059975] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fd32e75-78a1-418d-949d-4dce16843d94 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.068088] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1330.069119] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d4ad40b4-db10-475a-9b04-621a26f6ab67 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.070485] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1330.070660] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1330.071310] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-193f4ae0-2953-4dd5-93fd-956a3338178b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.075829] env[62740]: DEBUG oslo_vmware.api [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Waiting for the task: (returnval){ [ 1330.075829] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52c5e700-f950-13d2-811b-f4817292ae99" [ 1330.075829] env[62740]: _type = "Task" [ 1330.075829] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.082544] env[62740]: DEBUG oslo_vmware.api [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52c5e700-f950-13d2-811b-f4817292ae99, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.132878] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1330.133133] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Deleting contents of the VM from datastore datastore1 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1330.133325] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Deleting the datastore file [datastore1] 472cd209-4192-4473-b788-d1ea342653bf {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1330.133579] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-372bbef8-a235-46e0-8434-47befbec68e6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.139918] env[62740]: DEBUG oslo_vmware.api [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Waiting for the task: (returnval){ [ 1330.139918] env[62740]: value = "task-640261" [ 1330.139918] env[62740]: _type = "Task" [ 1330.139918] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.147443] env[62740]: DEBUG oslo_vmware.api [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Task: {'id': task-640261, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.588039] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1330.588039] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Creating directory with path [datastore1] vmware_temp/15c562bf-6ab6-458a-9532-394b8aeb98d0/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1330.588039] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-007d78a4-7cf6-4f33-8504-93384a43da82 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.600141] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Created directory with path [datastore1] vmware_temp/15c562bf-6ab6-458a-9532-394b8aeb98d0/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1330.600337] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Fetch image to [datastore1] vmware_temp/15c562bf-6ab6-458a-9532-394b8aeb98d0/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1330.600508] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore1] vmware_temp/15c562bf-6ab6-458a-9532-394b8aeb98d0/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore1 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1330.601277] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4c3d61a-23b1-45ac-8699-923e15b74010 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.607868] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-681bdc9b-7c36-49e9-b02f-209834c6f65c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.616685] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27140f05-6218-4656-92ba-add7bbab4073 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.649358] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-186a2ce2-2d73-4553-b4b5-3e54636c153f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.656516] env[62740]: DEBUG oslo_vmware.api [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Task: {'id': task-640261, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.084216} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.657891] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1330.658091] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Deleted contents of the VM from datastore datastore1 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1330.658272] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1330.658467] env[62740]: INFO nova.compute.manager [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Took 0.60 seconds to destroy the instance on the hypervisor. 
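The unregister/delete sequence above follows oslo.vmware's standard task-polling pattern: the driver invokes FileManager.DeleteDatastoreFile_Task, then wait_for_task() polls the returned Task object (the "progress is 0%" lines) until vCenter reports success or an error state. A minimal sketch of that pattern, assuming a reachable vCenter and using the datastore path from the trace as a placeholder:

    # Editorial sketch of the polling pattern, not Nova's exact code.
    from oslo_vmware import api

    # Host and credentials are placeholders.
    session = api.VMwareAPISession('vc1.example.test', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    dc_ref = None  # real code passes the Datacenter managed object reference

    # DeleteDatastoreFile_Task returns a Task moref; wait_for_task() polls it
    # and raises if the task ends in an error state.
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              session.vim.service_content.fileManager,
                              name='[datastore1] 472cd209-4192-4473-b788-d1ea342653bf',
                              datacenter=dc_ref)
    session.wait_for_task(task)
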
[ 1330.660204] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c1406c37-548f-4105-842a-e5eaca0e4361 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.662014] env[62740]: DEBUG nova.compute.claims [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1330.662196] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1330.662413] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1330.664292] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg 51bf209d0eb548618689618918316715 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1330.683753] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore1 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1330.703439] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 51bf209d0eb548618689618918316715 [ 1330.735921] env[62740]: DEBUG oslo_vmware.rw_handles [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/15c562bf-6ab6-458a-9532-394b8aeb98d0/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1330.800731] env[62740]: DEBUG oslo_vmware.rw_handles [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Completed reading data from the image iterator. 
{{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1330.800931] env[62740]: DEBUG oslo_vmware.rw_handles [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/15c562bf-6ab6-458a-9532-394b8aeb98d0/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1330.985278] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caea911b-6d45-4603-b598-11eb48bf988e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.992173] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cff29ac-1f51-44f6-85fc-adae776d4635 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.021117] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8fe77ff-f30e-41b8-bc0c-782a80d8b2c2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.028302] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a406bc4e-81ec-44c1-baf0-3f339ba1943a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.041105] env[62740]: DEBUG nova.compute.provider_tree [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1331.041594] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg ac22219b16d74fcb877037131dc103ce in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1331.049202] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac22219b16d74fcb877037131dc103ce [ 1331.050791] env[62740]: DEBUG nova.scheduler.client.report [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1331.052420] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg 
5cde7d10eb994bb6b730df82d7ba6e63 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1331.064090] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5cde7d10eb994bb6b730df82d7ba6e63 [ 1331.064793] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.402s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1331.065330] env[62740]: ERROR nova.compute.manager [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1331.065330] env[62740]: Faults: ['InvalidArgument'] [ 1331.065330] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] Traceback (most recent call last): [ 1331.065330] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1331.065330] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] self.driver.spawn(context, instance, image_meta, [ 1331.065330] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1331.065330] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1331.065330] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1331.065330] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] self._fetch_image_if_missing(context, vi) [ 1331.065330] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1331.065330] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] image_cache(vi, tmp_image_ds_loc) [ 1331.065330] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1331.065662] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] vm_util.copy_virtual_disk( [ 1331.065662] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1331.065662] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] session._wait_for_task(vmdk_copy_task) [ 1331.065662] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1331.065662] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] return self.wait_for_task(task_ref) [ 1331.065662] env[62740]: ERROR 
nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1331.065662] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] return evt.wait() [ 1331.065662] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1331.065662] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] result = hub.switch() [ 1331.065662] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1331.065662] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] return self.greenlet.switch() [ 1331.065662] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1331.065662] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] self.f(*self.args, **self.kw) [ 1331.065945] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1331.065945] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] raise exceptions.translate_fault(task_info.error) [ 1331.065945] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1331.065945] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] Faults: ['InvalidArgument'] [ 1331.065945] env[62740]: ERROR nova.compute.manager [instance: 472cd209-4192-4473-b788-d1ea342653bf] [ 1331.066072] env[62740]: DEBUG nova.compute.utils [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1331.067497] env[62740]: DEBUG nova.compute.manager [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Build of instance 472cd209-4192-4473-b788-d1ea342653bf was re-scheduled: A specified parameter was not correct: fileType [ 1331.067497] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1331.067870] env[62740]: DEBUG nova.compute.manager [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1331.068062] env[62740]: DEBUG nova.compute.manager [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Virt driver does not provide unplug_vifs method, so it is 
not possible to determine if VIFs should be unplugged. {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1331.068221] env[62740]: DEBUG nova.compute.manager [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1331.068382] env[62740]: DEBUG nova.network.neutron [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1331.405309] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg dd014df8d52947c0b342c1408d32274b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1331.418426] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dd014df8d52947c0b342c1408d32274b [ 1331.419668] env[62740]: DEBUG nova.network.neutron [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1331.419668] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg fad35f27f57a4481b6380f34605f65c1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1331.435546] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fad35f27f57a4481b6380f34605f65c1 [ 1331.435546] env[62740]: INFO nova.compute.manager [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Took 0.37 seconds to deallocate network for instance. 
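The traceback above is the generic path by which a vCenter task failure surfaces in Nova: _poll_task sees the task in an error state, translate_fault() turns it into oslo_vmware.exceptions.VimFaultException, and _build_and_run_instance treats that as a reschedulable build failure (hence the "was re-scheduled" message). A hedged sketch of inspecting such a fault, with names taken from the traceback; `session` and `vmdk_copy_task` stand in for the objects there:

    # Illustration only, not Nova's exact error handling.
    from oslo_vmware import exceptions as vexc

    try:
        session.wait_for_task(vmdk_copy_task)
    except vexc.VimFaultException as e:
        # fault_list carries the vCenter fault names, e.g. ['InvalidArgument']
        # for the bad fileType parameter seen above.
        if 'InvalidArgument' in e.fault_list:
            raise  # bubbles up; the compute manager reschedules the build
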
[ 1331.437024] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg 1f329fe3b00a4ea2befd490ea16a7c33 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1331.483972] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1f329fe3b00a4ea2befd490ea16a7c33 [ 1331.486605] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg 076a847e0a1a47878618c37da185a69a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1331.519732] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 076a847e0a1a47878618c37da185a69a [ 1331.552571] env[62740]: INFO nova.scheduler.client.report [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Deleted allocations for instance 472cd209-4192-4473-b788-d1ea342653bf [ 1331.558597] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg b2f5456e893341e5a5813dcbd703f9af in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1331.575724] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b2f5456e893341e5a5813dcbd703f9af [ 1331.576389] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a22eee74-f86b-4132-ac94-ed3a8d05f085 tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Lock "472cd209-4192-4473-b788-d1ea342653bf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 494.841s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1331.577029] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 7ffffa863186402bbfa9a9a220dfe6a2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1331.577740] env[62740]: DEBUG oslo_concurrency.lockutils [None req-521bbb61-a739-4bf8-997a-6c0abba7c32b tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Lock "472cd209-4192-4473-b788-d1ea342653bf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 297.259s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1331.577965] env[62740]: DEBUG oslo_concurrency.lockutils [None req-521bbb61-a739-4bf8-997a-6c0abba7c32b tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Acquiring lock "472cd209-4192-4473-b788-d1ea342653bf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1331.578190] env[62740]: DEBUG oslo_concurrency.lockutils [None req-521bbb61-a739-4bf8-997a-6c0abba7c32b tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Lock 
"472cd209-4192-4473-b788-d1ea342653bf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1331.578360] env[62740]: DEBUG oslo_concurrency.lockutils [None req-521bbb61-a739-4bf8-997a-6c0abba7c32b tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Lock "472cd209-4192-4473-b788-d1ea342653bf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1331.580982] env[62740]: INFO nova.compute.manager [None req-521bbb61-a739-4bf8-997a-6c0abba7c32b tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Terminating instance [ 1331.583095] env[62740]: DEBUG nova.compute.manager [None req-521bbb61-a739-4bf8-997a-6c0abba7c32b tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1331.583323] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-521bbb61-a739-4bf8-997a-6c0abba7c32b tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1331.583833] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b1b77760-70f5-4c3c-8742-2aaa75e6726e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.592494] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ffffa863186402bbfa9a9a220dfe6a2 [ 1331.595528] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12397d9f-a27a-4ab0-bfb2-a5d5b392b634 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.605859] env[62740]: DEBUG nova.compute.manager [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1331.607634] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 760133993ae54cbf89043f612cd3217e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1331.625995] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-521bbb61-a739-4bf8-997a-6c0abba7c32b tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 472cd209-4192-4473-b788-d1ea342653bf could not be found. 
[ 1331.626244] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-521bbb61-a739-4bf8-997a-6c0abba7c32b tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1331.626454] env[62740]: INFO nova.compute.manager [None req-521bbb61-a739-4bf8-997a-6c0abba7c32b tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1331.626670] env[62740]: DEBUG oslo.service.loopingcall [None req-521bbb61-a739-4bf8-997a-6c0abba7c32b tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1331.626896] env[62740]: DEBUG nova.compute.manager [-] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1331.626993] env[62740]: DEBUG nova.network.neutron [-] [instance: 472cd209-4192-4473-b788-d1ea342653bf] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1331.639186] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 760133993ae54cbf89043f612cd3217e [ 1331.644832] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 3a360cca5b5846539a6c90280cfc9d29 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1331.651127] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a360cca5b5846539a6c90280cfc9d29 [ 1331.651598] env[62740]: DEBUG nova.network.neutron [-] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1331.651961] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 0f8d99d6ae2c4fcf9aeb33c09e606402 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1331.653312] env[62740]: DEBUG oslo_concurrency.lockutils [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1331.653541] env[62740]: DEBUG oslo_concurrency.lockutils [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1331.655041] env[62740]: INFO nova.compute.claims [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Claim successful on node 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1331.656691] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 6bb3f782ce42464080483f7303696d17 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1331.659284] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0f8d99d6ae2c4fcf9aeb33c09e606402 [ 1331.659763] env[62740]: INFO nova.compute.manager [-] [instance: 472cd209-4192-4473-b788-d1ea342653bf] Took 0.03 seconds to deallocate network for instance. [ 1331.662966] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-521bbb61-a739-4bf8-997a-6c0abba7c32b tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg 647d30b061574653aef95a0d4f086196 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1331.689599] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6bb3f782ce42464080483f7303696d17 [ 1331.690297] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 647d30b061574653aef95a0d4f086196 [ 1331.692243] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg ae0e5a62b720418a9df5e28cb5bd00c4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1331.698566] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae0e5a62b720418a9df5e28cb5bd00c4 [ 1331.706348] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-521bbb61-a739-4bf8-997a-6c0abba7c32b tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg 1e3ca0a247004b0ba17fe950e3319b80 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1331.742362] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1e3ca0a247004b0ba17fe950e3319b80 [ 1331.744984] env[62740]: DEBUG oslo_concurrency.lockutils [None req-521bbb61-a739-4bf8-997a-6c0abba7c32b tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Lock "472cd209-4192-4473-b788-d1ea342653bf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.167s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1331.745326] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-521bbb61-a739-4bf8-997a-6c0abba7c32b tempest-MigrationsAdminTest-1066981656 tempest-MigrationsAdminTest-1066981656-project-member] Expecting reply to msg eb8ee2a4bd334e2cb3858b66b183262d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1331.746303] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "472cd209-4192-4473-b788-d1ea342653bf" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 149.488s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1331.746563] env[62740]: INFO nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 472cd209-4192-4473-b788-d1ea342653bf] During sync_power_state the instance has a pending task (deleting). Skip. 
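The inventory dict that the scheduler report client logs for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 in this trace maps directly onto Placement's capacity model: usable capacity per resource class is (total - reserved) * allocation_ratio, with min_unit/max_unit/step_size constraining individual allocations. Worked against the logged values:

    # Capacity check against the logged inventory (sketch; formula matches
    # the Placement service's documented capacity calculation).
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
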
[ 1331.746784] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "472cd209-4192-4473-b788-d1ea342653bf" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1331.755027] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eb8ee2a4bd334e2cb3858b66b183262d [ 1331.911597] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2dfbce6-d7fc-44d1-94fb-6beff259190a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.919217] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b76f6460-13a3-4a89-8509-a328eb71bf9a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.949679] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-181efa0c-d06a-4882-9188-84eb61a79b2a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.957006] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc119fea-c0f0-439c-85da-9f3a9435be8c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.969972] env[62740]: DEBUG nova.compute.provider_tree [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1331.970491] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 95b7f362c2624b20b48ad106738ce230 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1331.978247] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 95b7f362c2624b20b48ad106738ce230 [ 1331.979180] env[62740]: DEBUG nova.scheduler.client.report [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1331.981647] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 2ba7af026966446097b30a27d093851b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 
1331.992790] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ba7af026966446097b30a27d093851b [ 1331.993469] env[62740]: DEBUG oslo_concurrency.lockutils [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.340s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1331.993960] env[62740]: DEBUG nova.compute.manager [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Start building networks asynchronously for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1331.995796] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg c79014b53c3d4cc087c032919cb4dcba in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1332.024468] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c79014b53c3d4cc087c032919cb4dcba [ 1332.026160] env[62740]: DEBUG nova.compute.utils [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1332.026815] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 9dabc78382dc4f41833e50e53eeb9c9e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1332.027788] env[62740]: DEBUG nova.compute.manager [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1332.028027] env[62740]: DEBUG nova.network.neutron [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1332.036027] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9dabc78382dc4f41833e50e53eeb9c9e [ 1332.036445] env[62740]: DEBUG nova.compute.manager [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1332.038120] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 37fb63c25b364dea814f863a2d86ee71 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1332.066547] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 37fb63c25b364dea814f863a2d86ee71 [ 1332.069357] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg d72edf206b0d49f0bb44ce064dcd93f8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1332.080977] env[62740]: DEBUG nova.policy [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aee525677ce346b59af803377086c7b9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '511c682375de4d5294723b0d656190d2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 1332.101716] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d72edf206b0d49f0bb44ce064dcd93f8 [ 1332.102887] env[62740]: DEBUG nova.compute.manager [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Start spawning the instance on the hypervisor. 
{{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1332.132429] env[62740]: DEBUG nova.virt.hardware [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1332.132663] env[62740]: DEBUG nova.virt.hardware [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1332.132822] env[62740]: DEBUG nova.virt.hardware [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1332.133011] env[62740]: DEBUG nova.virt.hardware [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1332.133170] env[62740]: DEBUG nova.virt.hardware [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1332.133320] env[62740]: DEBUG nova.virt.hardware [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1332.133527] env[62740]: DEBUG nova.virt.hardware [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1332.133686] env[62740]: DEBUG nova.virt.hardware [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1332.133856] 
env[62740]: DEBUG nova.virt.hardware [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1332.134033] env[62740]: DEBUG nova.virt.hardware [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1332.134211] env[62740]: DEBUG nova.virt.hardware [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1332.135080] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c6ba8e4-61ba-4d53-9ad0-68ba40eaefa0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.145862] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ce689c8-1e3e-42f3-b4fa-d8f46fa8bcc2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.537017] env[62740]: DEBUG nova.network.neutron [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Successfully created port: 1c238b05-af1e-4dd4-ab9e-3e9905a7acb5 {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1333.518859] env[62740]: DEBUG nova.compute.manager [req-5d490f88-3d46-4312-852c-048fed6fed0c req-023d292e-cf24-446a-9d8f-8d581cf652fe service nova] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Received event network-vif-plugged-1c238b05-af1e-4dd4-ab9e-3e9905a7acb5 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1333.519124] env[62740]: DEBUG oslo_concurrency.lockutils [req-5d490f88-3d46-4312-852c-048fed6fed0c req-023d292e-cf24-446a-9d8f-8d581cf652fe service nova] Acquiring lock "2deff09f-d24f-4609-91f2-1585e8407c2a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1333.519299] env[62740]: DEBUG oslo_concurrency.lockutils [req-5d490f88-3d46-4312-852c-048fed6fed0c req-023d292e-cf24-446a-9d8f-8d581cf652fe service nova] Lock "2deff09f-d24f-4609-91f2-1585e8407c2a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1333.519565] env[62740]: DEBUG oslo_concurrency.lockutils [req-5d490f88-3d46-4312-852c-048fed6fed0c req-023d292e-cf24-446a-9d8f-8d581cf652fe service nova] Lock "2deff09f-d24f-4609-91f2-1585e8407c2a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
1333.519886] env[62740]: DEBUG nova.compute.manager [req-5d490f88-3d46-4312-852c-048fed6fed0c req-023d292e-cf24-446a-9d8f-8d581cf652fe service nova] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] No waiting events found dispatching network-vif-plugged-1c238b05-af1e-4dd4-ab9e-3e9905a7acb5 {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1333.519886] env[62740]: WARNING nova.compute.manager [req-5d490f88-3d46-4312-852c-048fed6fed0c req-023d292e-cf24-446a-9d8f-8d581cf652fe service nova] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Received unexpected event network-vif-plugged-1c238b05-af1e-4dd4-ab9e-3e9905a7acb5 for instance with vm_state building and task_state spawning. [ 1333.523142] env[62740]: DEBUG nova.network.neutron [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Successfully updated port: 1c238b05-af1e-4dd4-ab9e-3e9905a7acb5 {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1333.523569] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 8030e61a6f80486a9884e1cc503a020b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1333.534213] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8030e61a6f80486a9884e1cc503a020b [ 1333.534834] env[62740]: DEBUG oslo_concurrency.lockutils [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Acquiring lock "refresh_cache-2deff09f-d24f-4609-91f2-1585e8407c2a" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1333.534959] env[62740]: DEBUG oslo_concurrency.lockutils [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Acquired lock "refresh_cache-2deff09f-d24f-4609-91f2-1585e8407c2a" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1333.535116] env[62740]: DEBUG nova.network.neutron [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1333.535497] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 39f45956bcfa43ccbfc81ba6dadf0489 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1333.543402] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 39f45956bcfa43ccbfc81ba6dadf0489 [ 1333.578769] env[62740]: DEBUG nova.network.neutron [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1333.756784] env[62740]: DEBUG nova.network.neutron [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Updating instance_info_cache with network_info: [{"id": "1c238b05-af1e-4dd4-ab9e-3e9905a7acb5", "address": "fa:16:3e:6f:3e:b2", "network": {"id": "70fb46bd-81f8-49d4-bb8f-cb2fb8b6944e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2105504489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "511c682375de4d5294723b0d656190d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77ccbd87-ecfd-4b2d-a1ea-29774addcef6", "external-id": "nsx-vlan-transportzone-385", "segmentation_id": 385, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c238b05-af", "ovs_interfaceid": "1c238b05-af1e-4dd4-ab9e-3e9905a7acb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1333.757314] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg e31384953ce7465fb0310da1e9ba4e87 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1333.769756] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e31384953ce7465fb0310da1e9ba4e87
[ 1333.770399] env[62740]: DEBUG oslo_concurrency.lockutils [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Releasing lock "refresh_cache-2deff09f-d24f-4609-91f2-1585e8407c2a" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1333.770686] env[62740]: DEBUG nova.compute.manager [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Instance network_info: |[{"id": "1c238b05-af1e-4dd4-ab9e-3e9905a7acb5", "address": "fa:16:3e:6f:3e:b2", "network": {"id": "70fb46bd-81f8-49d4-bb8f-cb2fb8b6944e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2105504489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "511c682375de4d5294723b0d656190d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77ccbd87-ecfd-4b2d-a1ea-29774addcef6", "external-id": "nsx-vlan-transportzone-385", "segmentation_id": 385, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c238b05-af", "ovs_interfaceid": "1c238b05-af1e-4dd4-ab9e-3e9905a7acb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}}
[ 1333.771100] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:3e:b2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '77ccbd87-ecfd-4b2d-a1ea-29774addcef6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1c238b05-af1e-4dd4-ab9e-3e9905a7acb5', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 1333.778888] env[62740]: DEBUG oslo.service.loopingcall [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1333.779496] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1333.780291] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a7637ff5-da77-448a-bf8d-ba9d3f50667a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1333.801466] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1333.801466] env[62740]: value = "task-640262"
[ 1333.801466] env[62740]: _type = "Task"
[ 1333.801466] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1333.810284] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640262, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1334.311363] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640262, 'name': CreateVM_Task, 'duration_secs': 0.295476} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1334.311534] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1334.312225] env[62740]: DEBUG oslo_concurrency.lockutils [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1334.312396] env[62740]: DEBUG oslo_concurrency.lockutils [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1334.312735] env[62740]: DEBUG oslo_concurrency.lockutils [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1334.313053] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-328f5f4c-ab5f-4735-a421-327ac9c9180a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1334.317466] env[62740]: DEBUG oslo_vmware.api [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Waiting for the task: (returnval){
[ 1334.317466] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52ce8984-9330-f1eb-1a23-74af52954df8"
[ 1334.317466] env[62740]: _type = "Task"
[ 1334.317466] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1334.325011] env[62740]: DEBUG oslo_vmware.api [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52ce8984-9330-f1eb-1a23-74af52954df8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1334.829502] env[62740]: DEBUG oslo_concurrency.lockutils [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1334.829934] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1334.829934] env[62740]: DEBUG oslo_concurrency.lockutils [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1335.593778] env[62740]: DEBUG nova.compute.manager [req-9657a928-ebff-46a6-af0d-fd636ce39082 req-13f97486-96b5-454d-8a24-98f0b8550a00 service nova] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Received event network-changed-1c238b05-af1e-4dd4-ab9e-3e9905a7acb5 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}}
[ 1335.593988] env[62740]: DEBUG nova.compute.manager [req-9657a928-ebff-46a6-af0d-fd636ce39082 req-13f97486-96b5-454d-8a24-98f0b8550a00 service nova] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Refreshing instance network info cache due to event network-changed-1c238b05-af1e-4dd4-ab9e-3e9905a7acb5. {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}}
[ 1335.594216] env[62740]: DEBUG oslo_concurrency.lockutils [req-9657a928-ebff-46a6-af0d-fd636ce39082 req-13f97486-96b5-454d-8a24-98f0b8550a00 service nova] Acquiring lock "refresh_cache-2deff09f-d24f-4609-91f2-1585e8407c2a" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1335.594359] env[62740]: DEBUG oslo_concurrency.lockutils [req-9657a928-ebff-46a6-af0d-fd636ce39082 req-13f97486-96b5-454d-8a24-98f0b8550a00 service nova] Acquired lock "refresh_cache-2deff09f-d24f-4609-91f2-1585e8407c2a" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1335.594521] env[62740]: DEBUG nova.network.neutron [req-9657a928-ebff-46a6-af0d-fd636ce39082 req-13f97486-96b5-454d-8a24-98f0b8550a00 service nova] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Refreshing network info cache for port 1c238b05-af1e-4dd4-ab9e-3e9905a7acb5 {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 1335.594994] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-9657a928-ebff-46a6-af0d-fd636ce39082 req-13f97486-96b5-454d-8a24-98f0b8550a00 service nova] Expecting reply to msg 22db15b00d78461b80866f267f27adad in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1335.602391] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 22db15b00d78461b80866f267f27adad
[ 1335.868971] env[62740]: DEBUG nova.network.neutron [req-9657a928-ebff-46a6-af0d-fd636ce39082 req-13f97486-96b5-454d-8a24-98f0b8550a00 service nova] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Updated VIF entry in instance network info cache for port 1c238b05-af1e-4dd4-ab9e-3e9905a7acb5. {{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 1335.869327] env[62740]: DEBUG nova.network.neutron [req-9657a928-ebff-46a6-af0d-fd636ce39082 req-13f97486-96b5-454d-8a24-98f0b8550a00 service nova] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Updating instance_info_cache with network_info: [{"id": "1c238b05-af1e-4dd4-ab9e-3e9905a7acb5", "address": "fa:16:3e:6f:3e:b2", "network": {"id": "70fb46bd-81f8-49d4-bb8f-cb2fb8b6944e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2105504489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "511c682375de4d5294723b0d656190d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77ccbd87-ecfd-4b2d-a1ea-29774addcef6", "external-id": "nsx-vlan-transportzone-385", "segmentation_id": 385, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c238b05-af", "ovs_interfaceid": "1c238b05-af1e-4dd4-ab9e-3e9905a7acb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1335.869863] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-9657a928-ebff-46a6-af0d-fd636ce39082 req-13f97486-96b5-454d-8a24-98f0b8550a00 service nova] Expecting reply to msg 5a15d9c42126434abb3185fda7b90e8d in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1335.880017] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5a15d9c42126434abb3185fda7b90e8d
[ 1335.880626] env[62740]: DEBUG oslo_concurrency.lockutils [req-9657a928-ebff-46a6-af0d-fd636ce39082 req-13f97486-96b5-454d-8a24-98f0b8550a00 service nova] Releasing lock "refresh_cache-2deff09f-d24f-4609-91f2-1585e8407c2a" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1361.738739] env[62740]: WARNING oslo_vmware.rw_handles [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 1361.738739] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 1361.738739] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 1361.738739] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 1361.738739] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 1361.738739] env[62740]: ERROR oslo_vmware.rw_handles response.begin()
[ 1361.738739] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 1361.738739] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 1361.738739] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 1361.738739] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 1361.738739] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 1361.738739] env[62740]: ERROR oslo_vmware.rw_handles
[ 1361.739412] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/727c0b8a-a9a0-4d48-baa0-5181d74a8185/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 1361.741867] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 1361.742067] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Copying Virtual Disk [datastore2] vmware_temp/727c0b8a-a9a0-4d48-baa0-5181d74a8185/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore2] vmware_temp/727c0b8a-a9a0-4d48-baa0-5181d74a8185/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 1361.742349] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-73d3f2b8-2d46-4477-b9a0-5450a6185a45 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1361.751226] env[62740]: DEBUG oslo_vmware.api [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Waiting for the task: (returnval){
[ 1361.751226] env[62740]: value = "task-640263"
[ 1361.751226] env[62740]: _type = "Task"
[ 1361.751226] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1361.759685] env[62740]: DEBUG oslo_vmware.api [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Task: {'id': task-640263, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1362.261414] env[62740]: DEBUG oslo_vmware.exceptions [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Fault InvalidArgument not matched. {{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 1362.261716] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1362.262291] env[62740]: ERROR nova.compute.manager [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1362.262291] env[62740]: Faults: ['InvalidArgument']
[ 1362.262291] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Traceback (most recent call last):
[ 1362.262291] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 1362.262291] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] yield resources
[ 1362.262291] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1362.262291] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] self.driver.spawn(context, instance, image_meta,
[ 1362.262291] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1362.262291] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1362.262291] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1362.262291] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] self._fetch_image_if_missing(context, vi)
[ 1362.262291] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1362.262679] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] image_cache(vi, tmp_image_ds_loc)
[ 1362.262679] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1362.262679] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] vm_util.copy_virtual_disk(
[ 1362.262679] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1362.262679] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] session._wait_for_task(vmdk_copy_task)
[ 1362.262679] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1362.262679] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] return self.wait_for_task(task_ref)
[ 1362.262679] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1362.262679] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] return evt.wait()
[ 1362.262679] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1362.262679] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] result = hub.switch()
[ 1362.262679] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1362.262679] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] return self.greenlet.switch()
[ 1362.262969] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1362.262969] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] self.f(*self.args, **self.kw)
[ 1362.262969] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1362.262969] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] raise exceptions.translate_fault(task_info.error)
[ 1362.262969] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1362.262969] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Faults: ['InvalidArgument']
[ 1362.262969] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398]
[ 1362.262969] env[62740]: INFO nova.compute.manager [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Terminating instance
[ 1362.264771] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1362.264771] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1362.264771] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eeb07ff9-db8a-4e19-8672-da92159adcec {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1362.266959] env[62740]: DEBUG nova.compute.manager [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 1362.267170] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1362.267872] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8a32192-a5d1-4a16-ab38-719d8e5465b7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1362.274263] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 1362.274470] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-663b1e8f-fc4c-4ba2-9d88-38deb2da295b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1362.276971] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1362.277251] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 1362.278255] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13576184-eaaa-497b-b821-65e0ab1344fa {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1362.284120] env[62740]: DEBUG oslo_vmware.api [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Waiting for the task: (returnval){
[ 1362.284120] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5266c911-983a-0923-adb1-1b27602ad85b"
[ 1362.284120] env[62740]: _type = "Task"
[ 1362.284120] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1362.290927] env[62740]: DEBUG oslo_vmware.api [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5266c911-983a-0923-adb1-1b27602ad85b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1362.342141] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 1362.342394] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 1362.342568] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Deleting the datastore file [datastore2] 43e4ddf4-230e-49f7-975f-ba99a6da9398 {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1362.342827] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4d515815-6121-45e2-bec0-e87b39897f49 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1362.348652] env[62740]: DEBUG oslo_vmware.api [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Waiting for the task: (returnval){
[ 1362.348652] env[62740]: value = "task-640265"
[ 1362.348652] env[62740]: _type = "Task"
[ 1362.348652] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1362.355803] env[62740]: DEBUG oslo_vmware.api [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Task: {'id': task-640265, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1362.794955] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 1362.795355] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Creating directory with path [datastore2] vmware_temp/7f173b93-61e1-4fc1-a900-2a60f4318fb0/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1362.795458] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ebcc8ba3-bd20-4441-ba4b-21b21b5ca1c2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1362.805960] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Created directory with path [datastore2] vmware_temp/7f173b93-61e1-4fc1-a900-2a60f4318fb0/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1362.806180] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Fetch image to [datastore2] vmware_temp/7f173b93-61e1-4fc1-a900-2a60f4318fb0/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 1362.806360] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/7f173b93-61e1-4fc1-a900-2a60f4318fb0/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 1362.807167] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63d09bdf-6f4e-49c5-b370-b9ed9c7047d0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1362.814078] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d7e38ff-44a0-4a1f-8b9d-5a6f8ce2962d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1362.822986] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7abc4944-e20e-4ae1-8603-96cdb5453565 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1362.855927] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10afb32a-e903-4aae-b7c6-56b5e0e544ba {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1362.863756] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9bacbf66-8e12-4b44-8bbc-8146f7da1a85 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1362.865360] env[62740]: DEBUG oslo_vmware.api [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Task: {'id': task-640265, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077017} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1362.865590] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1362.865774] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 1362.865941] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1362.866130] env[62740]: INFO nova.compute.manager [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Took 0.60 seconds to destroy the instance on the hypervisor.
[ 1362.868121] env[62740]: DEBUG nova.compute.claims [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 1362.868295] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1362.868527] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1362.870459] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Expecting reply to msg 467ec5ebdf0f471297a9866c928997d6 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1362.887821] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 1362.915863] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 467ec5ebdf0f471297a9866c928997d6
[ 1362.939842] env[62740]: DEBUG oslo_vmware.rw_handles [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7f173b93-61e1-4fc1-a900-2a60f4318fb0/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 1363.004336] env[62740]: DEBUG oslo_vmware.rw_handles [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Completed reading data from the image iterator. {{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 1363.004537] env[62740]: DEBUG oslo_vmware.rw_handles [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7f173b93-61e1-4fc1-a900-2a60f4318fb0/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 1363.181300] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4f5426d-a1de-477f-a69d-e6195c267985 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1363.189989] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c81111a0-dc9b-462c-9a9a-0cb231729ab5 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1363.218596] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c43bdac2-173d-4074-a32a-b125cac9392c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1363.225368] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-147e523f-9a5b-42a2-86b9-74841d4084cc {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1363.238208] env[62740]: DEBUG nova.compute.provider_tree [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1363.238743] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Expecting reply to msg 149924d8c4bd464cb1156c72e46f5b34 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1363.245934] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 149924d8c4bd464cb1156c72e46f5b34
[ 1363.246828] env[62740]: DEBUG nova.scheduler.client.report [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1363.249095] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Expecting reply to msg f9c2b703557e44f5b9dd3d565b30236a in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1363.260026] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f9c2b703557e44f5b9dd3d565b30236a
[ 1363.260676] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.392s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1363.261221] env[62740]: ERROR nova.compute.manager [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1363.261221] env[62740]: Faults: ['InvalidArgument']
[ 1363.261221] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Traceback (most recent call last):
[ 1363.261221] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1363.261221] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] self.driver.spawn(context, instance, image_meta,
[ 1363.261221] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1363.261221] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1363.261221] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1363.261221] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] self._fetch_image_if_missing(context, vi)
[ 1363.261221] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1363.261221] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] image_cache(vi, tmp_image_ds_loc)
[ 1363.261221] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1363.261520] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] vm_util.copy_virtual_disk(
[ 1363.261520] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1363.261520] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] session._wait_for_task(vmdk_copy_task)
[ 1363.261520] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1363.261520] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] return self.wait_for_task(task_ref)
[ 1363.261520] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1363.261520] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] return evt.wait()
[ 1363.261520] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1363.261520] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] result = hub.switch()
[ 1363.261520] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1363.261520] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] return self.greenlet.switch()
[ 1363.261520] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1363.261520] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] self.f(*self.args, **self.kw)
[ 1363.261942] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1363.261942] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] raise exceptions.translate_fault(task_info.error)
[ 1363.261942] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1363.261942] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Faults: ['InvalidArgument']
[ 1363.261942] env[62740]: ERROR nova.compute.manager [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398]
[ 1363.261942] env[62740]: DEBUG nova.compute.utils [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1363.263303] env[62740]: DEBUG nova.compute.manager [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Build of instance 43e4ddf4-230e-49f7-975f-ba99a6da9398 was re-scheduled: A specified parameter was not correct: fileType
[ 1363.263303] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 1363.263673] env[62740]: DEBUG nova.compute.manager [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 1363.263847] env[62740]: DEBUG nova.compute.manager [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 1363.264034] env[62740]: DEBUG nova.compute.manager [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 1363.264201] env[62740]: DEBUG nova.network.neutron [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1363.635197] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Expecting reply to msg 372a2d67aae742fb951d3d0f5d4e79bd in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1363.644087] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 372a2d67aae742fb951d3d0f5d4e79bd
[ 1363.644701] env[62740]: DEBUG nova.network.neutron [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1363.645271] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Expecting reply to msg 0a162217e246463aa9b3deb2f4303236 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1363.659978] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0a162217e246463aa9b3deb2f4303236
[ 1363.660614] env[62740]: INFO nova.compute.manager [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Took 0.40 seconds to deallocate network for instance.
[ 1363.662386] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Expecting reply to msg 7b28ac48c729447eb0ab2f6cbd5a8516 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1363.703593] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7b28ac48c729447eb0ab2f6cbd5a8516
[ 1363.706456] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Expecting reply to msg 7b88487a22c747caa5cd619d2f2c9d7c in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1363.740618] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7b88487a22c747caa5cd619d2f2c9d7c
[ 1363.772843] env[62740]: INFO nova.scheduler.client.report [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Deleted allocations for instance 43e4ddf4-230e-49f7-975f-ba99a6da9398
[ 1363.779151] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Expecting reply to msg 2e9fb7cd08ac45c19de86e217d88745a in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1363.797910] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e9fb7cd08ac45c19de86e217d88745a
[ 1363.798786] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dd269bcb-38e5-4a77-ad21-c0605b5cd84a tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Lock "43e4ddf4-230e-49f7-975f-ba99a6da9398" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 322.171s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1363.799106] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg afb446cf3b6648cf806b84029a5c33d8 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1363.799868] env[62740]: DEBUG oslo_concurrency.lockutils [None req-d3c3cf16-af6f-45bb-a72a-980969b694bd tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Lock "43e4ddf4-230e-49f7-975f-ba99a6da9398" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 125.843s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1363.800096] env[62740]: DEBUG oslo_concurrency.lockutils [None req-d3c3cf16-af6f-45bb-a72a-980969b694bd tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Acquiring lock "43e4ddf4-230e-49f7-975f-ba99a6da9398-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1363.800303] env[62740]: DEBUG oslo_concurrency.lockutils [None req-d3c3cf16-af6f-45bb-a72a-980969b694bd tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Lock "43e4ddf4-230e-49f7-975f-ba99a6da9398-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1363.800472] env[62740]: DEBUG oslo_concurrency.lockutils [None req-d3c3cf16-af6f-45bb-a72a-980969b694bd tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Lock "43e4ddf4-230e-49f7-975f-ba99a6da9398-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1363.802383] env[62740]: INFO nova.compute.manager [None req-d3c3cf16-af6f-45bb-a72a-980969b694bd tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Terminating instance
[ 1363.804060] env[62740]: DEBUG nova.compute.manager [None req-d3c3cf16-af6f-45bb-a72a-980969b694bd tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 1363.804324] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-d3c3cf16-af6f-45bb-a72a-980969b694bd tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1363.804776] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7fc8b158-61df-4a44-a1d5-371ed656789c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1363.814180] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21b82766-5346-47dc-bd60-38d75e1fae45 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1363.826086] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg afb446cf3b6648cf806b84029a5c33d8
[ 1363.826555] env[62740]: DEBUG nova.compute.manager [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 1363.828367] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg 7338ed60b3cc478bbd970ab62c9ce295 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1363.846212] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-d3c3cf16-af6f-45bb-a72a-980969b694bd tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 43e4ddf4-230e-49f7-975f-ba99a6da9398 could not be found.
[ 1363.846419] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-d3c3cf16-af6f-45bb-a72a-980969b694bd tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1363.846601] env[62740]: INFO nova.compute.manager [None req-d3c3cf16-af6f-45bb-a72a-980969b694bd tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1363.846847] env[62740]: DEBUG oslo.service.loopingcall [None req-d3c3cf16-af6f-45bb-a72a-980969b694bd tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1363.847084] env[62740]: DEBUG nova.compute.manager [-] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1363.847181] env[62740]: DEBUG nova.network.neutron [-] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1363.862032] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7338ed60b3cc478bbd970ab62c9ce295 [ 1363.869185] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg a5f7381cc46d49c4b9fd619a6c6d8493 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1363.876701] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1363.876701] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1363.877955] env[62740]: INFO nova.compute.claims [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1363.879679] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg 46aa921dc6cd44b7b42d10af28138dee in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1363.880659] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a5f7381cc46d49c4b9fd619a6c6d8493 [ 1363.881139] env[62740]: DEBUG nova.network.neutron [-] [instance: 
[ 1363.881453] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 7e28040d2e3445bbbf69c96db6249a31 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1363.890274] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7e28040d2e3445bbbf69c96db6249a31
[ 1363.890701] env[62740]: INFO nova.compute.manager [-] [instance: 43e4ddf4-230e-49f7-975f-ba99a6da9398] Took 0.04 seconds to deallocate network for instance.
[ 1363.894133] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-d3c3cf16-af6f-45bb-a72a-980969b694bd tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Expecting reply to msg 8c631b90476c42e9b9a15fbd2f74b9da in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1363.921421] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 46aa921dc6cd44b7b42d10af28138dee
[ 1363.923316] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg 9de6cf271490416c89c508bfaa8d8804 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1363.931494] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8c631b90476c42e9b9a15fbd2f74b9da
[ 1363.931924] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9de6cf271490416c89c508bfaa8d8804
[ 1363.948689] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-d3c3cf16-af6f-45bb-a72a-980969b694bd tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Expecting reply to msg 80a08f8c6c7d4665a7066851139b972d in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1363.989890] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 80a08f8c6c7d4665a7066851139b972d
[ 1363.992877] env[62740]: DEBUG oslo_concurrency.lockutils [None req-d3c3cf16-af6f-45bb-a72a-980969b694bd tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Lock "43e4ddf4-230e-49f7-975f-ba99a6da9398" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.193s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1363.992877] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-d3c3cf16-af6f-45bb-a72a-980969b694bd tempest-VolumesAdminNegativeTest-306817694 tempest-VolumesAdminNegativeTest-306817694-project-member] Expecting reply to msg 936937a46f6a4705b1bcdf87f9ecc210 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1364.023776] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 936937a46f6a4705b1bcdf87f9ecc210
[ 1364.137155] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6821133a-f2a8-4c30-8864-38ceca885f56 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1364.144392] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b23a4dbb-4ec3-472a-8c81-684872a0cc4d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1364.174997] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b8f0989-b7ed-4edd-bdc2-5d4f921af3f7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1364.182051] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82b0cec0-0eb4-49e1-ab16-437761a22e30 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1364.194687] env[62740]: DEBUG nova.compute.provider_tree [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1364.195172] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg 53c694fabbf04f0bb2f4f36be8cc44e4 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1364.202711] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 53c694fabbf04f0bb2f4f36be8cc44e4
[ 1364.203618] env[62740]: DEBUG nova.scheduler.client.report [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1364.206057] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg cb99a94b395e4394bc5faf397ed04837 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1364.219357] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cb99a94b395e4394bc5faf397ed04837
[ 1364.220163] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.344s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1364.220635] env[62740]: DEBUG nova.compute.manager [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Start building networks asynchronously for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}}
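The inventory payload above translates into schedulable capacity via Placement's standard formula, capacity = (total - reserved) * allocation_ratio. A quick check with the numbers copied from the log record:

```python
# Capacity math for the inventory logged above; values are verbatim
# from the set_inventory_for_provider record.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {capacity:g} schedulable units")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```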
[ 1364.222278] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg 11f4f67dbe1542b2aa5781cd08c8f9c8 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1364.254045] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 11f4f67dbe1542b2aa5781cd08c8f9c8
[ 1364.254965] env[62740]: DEBUG nova.compute.utils [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1364.255544] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg c7c363fe336e4476a5a90f7ff3e35644 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1364.256532] env[62740]: DEBUG nova.compute.manager [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}}
[ 1364.256695] env[62740]: DEBUG nova.network.neutron [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1364.265281] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c7c363fe336e4476a5a90f7ff3e35644
[ 1364.265788] env[62740]: DEBUG nova.compute.manager [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Start building block device mappings for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}}
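The "Using /dev/sd instead of None" line above is the device-name fallback: with no prefix supplied, Nova defaults to /dev/sd and picks the next free letter. A simplified stand-in for nova.compute.utils.get_next_device_name, not the real implementation:

```python
# Hedged sketch of next-device-name selection under the /dev/sd default.
import string

def next_device_name(used, prefix='/dev/sd'):
    for letter in string.ascii_lowercase:
        candidate = prefix + letter
        if candidate not in used:
            return candidate
    raise ValueError('no free device names')

print(next_device_name({'/dev/sda'}))  # /dev/sdb
```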
[ 1364.267441] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg d246496ee7c94c738972084a7c53f8f5 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1364.295982] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d246496ee7c94c738972084a7c53f8f5
[ 1364.298837] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg edc9324548f14e839048ace66dd64435 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1364.316601] env[62740]: DEBUG nova.policy [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e3061d1e1e6544388537275e2933b02f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '960954b067f841cf9dff2016571551bf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1364.327905] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg edc9324548f14e839048ace66dd64435
[ 1364.329074] env[62740]: DEBUG nova.compute.manager [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Start spawning the instance on the hypervisor. {{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}}
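The failed policy check above denies network:attach_external_network to credentials carrying only the reader/member roles. A minimal oslo.policy sketch of the same decision; the check string 'role:admin' is an assumption for illustration, not Nova's exact default rule:

```python
# Hedged oslo.policy example: member/reader credentials fail an
# admin-only rule, mirroring the nova.policy DEBUG line above.
from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.ConfigOpts())
enforcer.register_default(
    policy.RuleDefault('network:attach_external_network', 'role:admin'))

creds = {'roles': ['reader', 'member'],
         'project_id': '960954b067f841cf9dff2016571551bf'}
print(enforcer.enforce('network:attach_external_network', {}, creds))  # False
```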
[ 1364.354513] env[62740]: DEBUG nova.virt.hardware [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=<?>,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-09-04T08:25:18Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1364.354867] env[62740]: DEBUG nova.virt.hardware [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1364.355110] env[62740]: DEBUG nova.virt.hardware [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1364.355763] env[62740]: DEBUG nova.virt.hardware [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1364.355763] env[62740]: DEBUG nova.virt.hardware [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1364.355895] env[62740]: DEBUG nova.virt.hardware [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1364.356099] env[62740]: DEBUG nova.virt.hardware [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1364.356401] env[62740]: DEBUG nova.virt.hardware [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1364.356561] env[62740]: DEBUG nova.virt.hardware [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
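The topology search traced above boils down to enumerating (sockets, cores, threads) triples whose product equals the flavor's vCPU count, within the 65536 limits. A hedged sketch of that idea (Nova's real search differs in ordering and preference handling):

```python
# Enumerate CPU topologies whose product matches the vCPU count.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    found = []
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    found.append((s, c, t))
    return found

print(possible_topologies(1))  # [(1, 1, 1)] -- matches the log's result
```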
[ 1364.356827] env[62740]: DEBUG nova.virt.hardware [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1364.357096] env[62740]: DEBUG nova.virt.hardware [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1364.358106] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45acd255-7f46-4eb1-9f10-dfc2267d5fc9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1364.368019] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f02eb016-8ff7-4b94-b9bb-07a283d32f84 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1364.691672] env[62740]: DEBUG nova.network.neutron [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Successfully created port: 6992b25f-4680-43cb-8b4f-857b90b1c2e2 {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1365.481075] env[62740]: DEBUG nova.network.neutron [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Successfully updated port: 6992b25f-4680-43cb-8b4f-857b90b1c2e2 {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1365.481075] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg 7f6c5c49894a49dd8dc15370e808fd8f in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1365.490142] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7f6c5c49894a49dd8dc15370e808fd8f
[ 1365.490867] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Acquiring lock "refresh_cache-3f36f081-2851-4339-860d-0a302ef4ee2c" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1365.491020] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Acquired lock "refresh_cache-3f36f081-2851-4339-860d-0a302ef4ee2c" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1365.491191] env[62740]: DEBUG nova.network.neutron [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 1365.491694] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg cca0bd4d99fd44808e9d3f4829e79a5f in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1365.501704] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cca0bd4d99fd44808e9d3f4829e79a5f
[ 1365.733177] env[62740]: DEBUG nova.compute.manager [req-6f5340ee-6209-40fb-9305-409454a03ee8 req-f2f35173-8c21-46aa-aaf1-a026517f6d12 service nova] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Received event network-vif-plugged-6992b25f-4680-43cb-8b4f-857b90b1c2e2 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}}
[ 1365.733418] env[62740]: DEBUG oslo_concurrency.lockutils [req-6f5340ee-6209-40fb-9305-409454a03ee8 req-f2f35173-8c21-46aa-aaf1-a026517f6d12 service nova] Acquiring lock "3f36f081-2851-4339-860d-0a302ef4ee2c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1365.733625] env[62740]: DEBUG oslo_concurrency.lockutils [req-6f5340ee-6209-40fb-9305-409454a03ee8 req-f2f35173-8c21-46aa-aaf1-a026517f6d12 service nova] Lock "3f36f081-2851-4339-860d-0a302ef4ee2c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1365.733797] env[62740]: DEBUG oslo_concurrency.lockutils [req-6f5340ee-6209-40fb-9305-409454a03ee8 req-f2f35173-8c21-46aa-aaf1-a026517f6d12 service nova] Lock "3f36f081-2851-4339-860d-0a302ef4ee2c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1365.733968] env[62740]: DEBUG nova.compute.manager [req-6f5340ee-6209-40fb-9305-409454a03ee8 req-f2f35173-8c21-46aa-aaf1-a026517f6d12 service nova] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] No waiting events found dispatching network-vif-plugged-6992b25f-4680-43cb-8b4f-857b90b1c2e2 {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1365.738289] env[62740]: WARNING nova.compute.manager [req-6f5340ee-6209-40fb-9305-409454a03ee8 req-f2f35173-8c21-46aa-aaf1-a026517f6d12 service nova] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Received unexpected event network-vif-plugged-6992b25f-4680-43cb-8b4f-857b90b1c2e2 for instance with vm_state building and task_state spawning.
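The event records above show the external-event plumbing: Neutron reports network-vif-plugged, the compute manager tries to pop a matching waiter, and with nothing registered (the VM is still building) the event is logged as unexpected. A hedged stand-in for that bookkeeping, not Nova's actual InstanceEvents class:

```python
# Per-instance event registry keyed by event name; popping an event
# nobody is waiting on mirrors the "Received unexpected event" warning.
import threading

class InstanceEvents:
    def __init__(self):
        self._events = {}  # instance uuid -> {event name: threading.Event}

    def prepare(self, instance, name):
        self._events.setdefault(instance, {})[name] = threading.Event()

    def pop_instance_event(self, instance, name):
        event = self._events.get(instance, {}).pop(name, None)
        if event is None:
            print(f"Received unexpected event {name}")
        else:
            event.set()
        return event

events = InstanceEvents()
events.pop_instance_event('3f36f081', 'network-vif-plugged-6992b25f')
```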
[ 1365.739380] env[62740]: DEBUG nova.compute.manager [req-6f5340ee-6209-40fb-9305-409454a03ee8 req-f2f35173-8c21-46aa-aaf1-a026517f6d12 service nova] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Received event network-changed-6992b25f-4680-43cb-8b4f-857b90b1c2e2 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}}
[ 1365.741253] env[62740]: DEBUG nova.compute.manager [req-6f5340ee-6209-40fb-9305-409454a03ee8 req-f2f35173-8c21-46aa-aaf1-a026517f6d12 service nova] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Refreshing instance network info cache due to event network-changed-6992b25f-4680-43cb-8b4f-857b90b1c2e2. {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}}
[ 1365.741467] env[62740]: DEBUG oslo_concurrency.lockutils [req-6f5340ee-6209-40fb-9305-409454a03ee8 req-f2f35173-8c21-46aa-aaf1-a026517f6d12 service nova] Acquiring lock "refresh_cache-3f36f081-2851-4339-860d-0a302ef4ee2c" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1365.742463] env[62740]: DEBUG nova.network.neutron [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1365.932034] env[62740]: DEBUG nova.network.neutron [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Updating instance_info_cache with network_info: [{"id": "6992b25f-4680-43cb-8b4f-857b90b1c2e2", "address": "fa:16:3e:50:ed:fd", "network": {"id": "911124ba-af71-4345-a712-1d9f1b0ec94d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1164437049-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "960954b067f841cf9dff2016571551bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6992b25f-46", "ovs_interfaceid": "6992b25f-4680-43cb-8b4f-857b90b1c2e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1365.932503] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg c26bfc0e0eb042b687dd55d79900c341 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1365.944974] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c26bfc0e0eb042b687dd55d79900c341
[ 1365.945628] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Releasing lock "refresh_cache-3f36f081-2851-4339-860d-0a302ef4ee2c" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
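The instance_info_cache payload above is a JSON-like list of VIF dicts. A quick way to pull the useful fields out of one entry; the dict below is abbreviated from the log record:

```python
# Extract devname, MAC and fixed IPs from a (trimmed) network_info VIF.
vif = {
    "id": "6992b25f-4680-43cb-8b4f-857b90b1c2e2",
    "address": "fa:16:3e:50:ed:fd",
    "devname": "tap6992b25f-46",
    "network": {"subnets": [{"cidr": "192.168.128.0/28",
                             "ips": [{"address": "192.168.128.10"}]}]},
}
fixed_ips = [ip["address"]
             for subnet in vif["network"]["subnets"]
             for ip in subnet["ips"]]
print(vif["devname"], vif["address"], fixed_ips)
# tap6992b25f-46 fa:16:3e:50:ed:fd ['192.168.128.10']
```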
[ 1365.945944] env[62740]: DEBUG nova.compute.manager [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Instance network_info: |[{"id": "6992b25f-4680-43cb-8b4f-857b90b1c2e2", "address": "fa:16:3e:50:ed:fd", "network": {"id": "911124ba-af71-4345-a712-1d9f1b0ec94d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1164437049-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "960954b067f841cf9dff2016571551bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6992b25f-46", "ovs_interfaceid": "6992b25f-4680-43cb-8b4f-857b90b1c2e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}}
[ 1365.946965] env[62740]: DEBUG oslo_concurrency.lockutils [req-6f5340ee-6209-40fb-9305-409454a03ee8 req-f2f35173-8c21-46aa-aaf1-a026517f6d12 service nova] Acquired lock "refresh_cache-3f36f081-2851-4339-860d-0a302ef4ee2c" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1365.946965] env[62740]: DEBUG nova.network.neutron [req-6f5340ee-6209-40fb-9305-409454a03ee8 req-f2f35173-8c21-46aa-aaf1-a026517f6d12 service nova] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Refreshing network info cache for port 6992b25f-4680-43cb-8b4f-857b90b1c2e2 {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 1365.946965] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-6f5340ee-6209-40fb-9305-409454a03ee8 req-f2f35173-8c21-46aa-aaf1-a026517f6d12 service nova] Expecting reply to msg dfb62380a5404482a9ee752945582c59 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1365.950687] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:50:ed:fd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9c621a9c-66f5-426a-8aab-bd8b2e912106', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6992b25f-4680-43cb-8b4f-857b90b1c2e2', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 1365.956729] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Creating folder: Project (960954b067f841cf9dff2016571551bf). Parent ref: group-v156037. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1365.958105] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dfb62380a5404482a9ee752945582c59
[ 1365.958844] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bb4d8030-176d-42ad-aae9-3b0851fd4d2a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1365.972363] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Created folder: Project (960954b067f841cf9dff2016571551bf) in parent group-v156037.
[ 1365.972542] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Creating folder: Instances. Parent ref: group-v156153. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1365.972806] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e439530d-f311-4adb-af5a-8a35e12416d9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1365.983321] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Created folder: Instances in parent group-v156153.
[ 1365.983834] env[62740]: DEBUG oslo.service.loopingcall [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1365.984084] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1365.984309] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-02d0d8a4-62a5-4dfa-a9a9-922fcbe7bda8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1366.005609] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1366.005609] env[62740]: value = "task-640268"
[ 1366.005609] env[62740]: _type = "Task"
[ 1366.005609] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1366.016922] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640268, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1366.336535] env[62740]: DEBUG nova.network.neutron [req-6f5340ee-6209-40fb-9305-409454a03ee8 req-f2f35173-8c21-46aa-aaf1-a026517f6d12 service nova] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Updated VIF entry in instance network info cache for port 6992b25f-4680-43cb-8b4f-857b90b1c2e2. {{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
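The CreateVM_Task lines above are produced by oslo.vmware's task waiter, which polls the vSphere task until it reaches a terminal state. A minimal sketch of that loop; fetch_state is a hypothetical callable standing in for the real vSphere property read:

```python
# Hedged polling loop in the spirit of oslo_vmware.api.wait_for_task.
import time

def wait_for_task(task_id, fetch_state, interval=0.5):
    while True:
        state, progress = fetch_state(task_id)   # e.g. ('running', 0)
        print(f"Task: {task_id} progress is {progress}%.")
        if state == 'success':
            return
        if state == 'error':
            raise RuntimeError(f"task {task_id} failed")
        time.sleep(interval)
```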
[ 1366.336535] env[62740]: DEBUG nova.network.neutron [req-6f5340ee-6209-40fb-9305-409454a03ee8 req-f2f35173-8c21-46aa-aaf1-a026517f6d12 service nova] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Updating instance_info_cache with network_info: [{"id": "6992b25f-4680-43cb-8b4f-857b90b1c2e2", "address": "fa:16:3e:50:ed:fd", "network": {"id": "911124ba-af71-4345-a712-1d9f1b0ec94d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1164437049-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "960954b067f841cf9dff2016571551bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6992b25f-46", "ovs_interfaceid": "6992b25f-4680-43cb-8b4f-857b90b1c2e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1366.336680] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-6f5340ee-6209-40fb-9305-409454a03ee8 req-f2f35173-8c21-46aa-aaf1-a026517f6d12 service nova] Expecting reply to msg 0e9977f78ae146bba218a03aa5299056 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1366.350069] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0e9977f78ae146bba218a03aa5299056
[ 1366.350069] env[62740]: DEBUG oslo_concurrency.lockutils [req-6f5340ee-6209-40fb-9305-409454a03ee8 req-f2f35173-8c21-46aa-aaf1-a026517f6d12 service nova] Releasing lock "refresh_cache-3f36f081-2851-4339-860d-0a302ef4ee2c" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1366.521239] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640268, 'name': CreateVM_Task, 'duration_secs': 0.283223} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1366.521866] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1366.522905] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1366.523259] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1366.525022] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1366.525022] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89ea3c2f-c5b7-4244-b7f9-b02ea3281e8e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1366.528916] env[62740]: DEBUG oslo_vmware.api [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Waiting for the task: (returnval){
[ 1366.528916] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52118145-91e1-65d2-10ec-c9bac2c691fe"
[ 1366.528916] env[62740]: _type = "Task"
[ 1366.528916] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1366.538209] env[62740]: DEBUG oslo_vmware.api [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52118145-91e1-65d2-10ec-c9bac2c691fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1367.040816] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1367.040816] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1367.040816] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1370.183726] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 324dc5c164734d449057cc3a2ea40923 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1370.200082] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 324dc5c164734d449057cc3a2ea40923
[ 1370.905456] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dad7a0bd-a740-4f77-887e-2ffba67ff5d3 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg d93d9d585fdc4ca799308b4bd840d0a4 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1370.917381] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d93d9d585fdc4ca799308b4bd840d0a4
[ 1370.918068] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dad7a0bd-a740-4f77-887e-2ffba67ff5d3 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Acquiring lock "2deff09f-d24f-4609-91f2-1585e8407c2a" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1371.891050] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1371.891354] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1372.886716] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1373.891164] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
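The "Running periodic task ComputeManager.*" lines above come from oslo.service's periodic_task machinery. A hedged, minimal example of that decorator-driven pattern; the spacing value and task body are made up:

```python
# Minimal oslo.service periodic-task sketch, assuming oslo.service and
# oslo.config are installed. run_periodic_tasks() emits the same
# "Running periodic task ..." DEBUG lines seen in the log.
from oslo_config import cfg
from oslo_service import periodic_task

class Manager(periodic_task.PeriodicTasks):
    @periodic_task.periodic_task(spacing=10, run_immediately=True)
    def _poll_volume_usage(self, context):
        pass  # Nova polls volume usage here

mgr = Manager(cfg.ConfigOpts())
mgr.run_periodic_tasks(None)
```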
[ 1373.891463] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Starting heal instance info cache {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}}
[ 1373.891463] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Rebuilding the list of instances to heal {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}}
[ 1373.892051] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 5956605444fb4c3b812dd52a6352fa82 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1373.912561] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5956605444fb4c3b812dd52a6352fa82
[ 1373.914919] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 1373.915083] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 1373.915220] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 158406db-7196-4826-aefa-20a58daa186b] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 1373.915348] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 1373.915476] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 1373.915612] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 1373.915761] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 1373.915888] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 1373.916016] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 1373.916159] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 1373.916282] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Didn't find any instances for network info cache update. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}}
[ 1373.916989] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1374.823851] env[62740]: DEBUG oslo_concurrency.lockutils [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Acquiring lock "a41506d2-33b2-40b8-badb-41312c7abbd2" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1374.824064] env[62740]: DEBUG oslo_concurrency.lockutils [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Lock "a41506d2-33b2-40b8-badb-41312c7abbd2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1374.892048] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1374.894013] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1374.894013] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62740) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}}
[ 1374.894013] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager.update_available_resource {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1374.894013] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg d0822f436a8c454e9c7ac263c07c6c94 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1374.911136] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d0822f436a8c454e9c7ac263c07c6c94
[ 1374.914869] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1374.914869] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1374.914869] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1374.914869] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62740) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1374.914869] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b756f15f-7074-490b-898d-79038b9bb849 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1374.922946] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-849f8f3c-cc62-4e2c-8d81-314819f7c2d2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1374.941908] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-752a98d5-a111-4213-ad9f-f9b1627daab8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1374.949384] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-423a5b22-3282-4b81-9430-fa96ddadf91f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1374.979220] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181626MB free_disk=90GB free_vcpus=48 pci_devices=None {{(pid=62740) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1374.982081] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1374.982081] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1374.982081] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 5a1f609ade9f4e29b11bd0c0e04e9f0c in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1375.025176] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5a1f609ade9f4e29b11bd0c0e04e9f0c
[ 1375.033021] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg a53b0eec3f084ea69a02d6f78a74d6fa in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1375.044692] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a53b0eec3f084ea69a02d6f78a74d6fa
[ 1375.072838] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance d8dac9af-0897-4fbf-8ee6-1fb3955d48c0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1375.072838] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance d6c3ca16-5c7c-41e6-9850-10221603ad2a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1375.072838] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 158406db-7196-4826-aefa-20a58daa186b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1375.072838] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance eba85edb-4d86-42c9-8b49-98f2173a3eeb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1375.073103] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 7aacf4e0-b508-4a18-909a-3d1fe9458d98 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1375.073103] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 6005c9dc-3067-4719-a8f9-befb63f7cd8d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1375.073103] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance ba23ede2-be42-48ac-b281-571ccd158dee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1375.073103] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1375.073268] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 2deff09f-d24f-4609-91f2-1585e8407c2a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1375.073480] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 3f36f081-2851-4339-860d-0a302ef4ee2c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1375.073832] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 8ca496794bef4ebc874968741c83b359 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1375.084017] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8ca496794bef4ebc874968741c83b359
[ 1375.084874] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 913ddb91-9d46-459e-8775-c9f380ed3cc4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
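The resource tracker records above classify placement allocations two ways: instances actively managed on this host keep theirs, while instances scheduled here but not yet started are skipped rather than healed. A hedged sketch of that classification; the instance tuples are illustrative:

```python
# Classify instances the way _remove_deleted_instances_allocations logs
# them: (uuid, host, started) tuples stand in for real Instance objects.
def audit_allocations(instances, host):
    for uuid, inst_host, started in instances:
        if inst_host == host and started:
            print(f"Instance {uuid} actively managed on this compute host")
        elif inst_host == host:
            print(f"Instance {uuid} scheduled but not started; "
                  f"skipping heal of allocation")

audit_allocations([('3f36f081', 'cpu-1', True),
                   ('913ddb91', 'cpu-1', False)], 'cpu-1')
```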
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1375.085393] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 51be0a9da50d4dd58409a01492c797d7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1375.105605] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 51be0a9da50d4dd58409a01492c797d7 [ 1375.106348] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 07efd13e-40d0-4158-b17c-6f5c75474ce3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1375.106860] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 475cef718e2e40328a707b320fa88df8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1375.122114] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 475cef718e2e40328a707b320fa88df8 [ 1375.122879] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 8a156903-e4cf-43ed-9c6a-962a06ff9ef4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1375.123413] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 3d012cd27f1b476f956b2683892c8257 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1375.135026] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3d012cd27f1b476f956b2683892c8257 [ 1375.135730] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 388d71f2-b229-4666-a53d-d5b07e498eed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1375.136396] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 2b77a87886d14ef2b73eb4186b6d6e04 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1375.155020] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b77a87886d14ef2b73eb4186b6d6e04 [ 1375.155020] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 58ae8579-4ea3-45ba-a982-10e0ca82874c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1375.155020] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg ba1a993111d74d59a16164bf95ded854 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1375.170922] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ba1a993111d74d59a16164bf95ded854 [ 1375.171651] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance c0daf074-eecb-4899-938f-477031efc6d1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1375.172175] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 8cf3d58d266d403d8bf39b4f57dc77bf in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1375.182210] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8cf3d58d266d403d8bf39b4f57dc77bf [ 1375.182905] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 61fea037-aac3-47ef-aa6a-5dfa657d840d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1375.183523] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 4d416e99077d40ccbdcac6be590ef12a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1375.192799] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4d416e99077d40ccbdcac6be590ef12a [ 1375.193594] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 86c57375-8328-4344-b228-2f1ce6efc71e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1375.194085] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 635497aa2ae6477f9f23a0991c8093dc in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1375.210822] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 635497aa2ae6477f9f23a0991c8093dc [ 1375.212532] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance a41506d2-33b2-40b8-badb-41312c7abbd2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1375.212532] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1375.212532] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1375.462214] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d15fee9-a92f-45f1-81af-040e57b097f1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.471757] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e2f3f09-2d2c-4784-9e98-99878a28ccb9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.500775] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5d6e1ce-2070-4617-9b16-9d84cdd1b2d6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.507636] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf6dab19-3dca-4da7-97f5-fab77a560135 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.520360] env[62740]: DEBUG nova.compute.provider_tree [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1375.520838] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 128479bf693e40cdb1a73b94978603c4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1375.527604] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 128479bf693e40cdb1a73b94978603c4 [ 1375.528480] env[62740]: DEBUG nova.scheduler.client.report [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1375.530724] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 78b511c34a3e45d78f02526d9485209f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1375.541477] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 78b511c34a3e45d78f02526d9485209f [ 
1375.542149] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62740) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1375.542335] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.563s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1378.541145] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1379.547826] env[62740]: WARNING oslo_vmware.rw_handles [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1379.547826] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1379.547826] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1379.547826] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1379.547826] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1379.547826] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 1379.547826] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1379.547826] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1379.547826] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1379.547826] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1379.547826] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1379.547826] env[62740]: ERROR oslo_vmware.rw_handles [ 1379.548547] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/15c562bf-6ab6-458a-9532-394b8aeb98d0/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore1 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1379.550503] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1379.550756] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 
tempest-ServerDiskConfigTestJSON-1958840202-project-member] Copying Virtual Disk [datastore1] vmware_temp/15c562bf-6ab6-458a-9532-394b8aeb98d0/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore1] vmware_temp/15c562bf-6ab6-458a-9532-394b8aeb98d0/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1379.551045] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d4b730b7-5c96-40ab-9bcf-dd05083c24fb {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.559063] env[62740]: DEBUG oslo_vmware.api [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Waiting for the task: (returnval){ [ 1379.559063] env[62740]: value = "task-640269" [ 1379.559063] env[62740]: _type = "Task" [ 1379.559063] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.567058] env[62740]: DEBUG oslo_vmware.api [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Task: {'id': task-640269, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.070058] env[62740]: DEBUG oslo_vmware.exceptions [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Fault InvalidArgument not matched. 
{{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1380.070058] env[62740]: DEBUG oslo_concurrency.lockutils [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Releasing lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1380.070586] env[62740]: ERROR nova.compute.manager [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1380.070586] env[62740]: Faults: ['InvalidArgument'] [ 1380.070586] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Traceback (most recent call last): [ 1380.070586] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1380.070586] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] yield resources [ 1380.070586] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1380.070586] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] self.driver.spawn(context, instance, image_meta, [ 1380.070586] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1380.070586] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1380.070586] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1380.070586] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] self._fetch_image_if_missing(context, vi) [ 1380.070586] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1380.070921] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] image_cache(vi, tmp_image_ds_loc) [ 1380.070921] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1380.070921] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] vm_util.copy_virtual_disk( [ 1380.070921] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1380.070921] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] session._wait_for_task(vmdk_copy_task) [ 1380.070921] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1380.070921] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] return self.wait_for_task(task_ref) [ 1380.070921] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1380.070921] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] return evt.wait() [ 1380.070921] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1380.070921] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] result = hub.switch() [ 1380.070921] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1380.070921] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] return self.greenlet.switch() [ 1380.071323] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1380.071323] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] self.f(*self.args, **self.kw) [ 1380.071323] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1380.071323] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] raise exceptions.translate_fault(task_info.error) [ 1380.071323] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1380.071323] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Faults: ['InvalidArgument'] [ 1380.071323] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] [ 1380.071323] env[62740]: INFO nova.compute.manager [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Terminating instance [ 1380.072647] env[62740]: DEBUG oslo_concurrency.lockutils [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Acquired lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1380.073698] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1380.073698] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ab3c4f5f-d8f7-4743-8cef-c5f54c32d4d3 
{{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.075267] env[62740]: DEBUG nova.compute.manager [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1380.075463] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1380.076221] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-639432f9-c24e-4e5f-bf80-21cbf4dd2efd {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.083162] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1380.083286] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7b2d27ba-ecf3-4f6f-97ec-687a36c500b1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.085478] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1380.085647] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1380.086646] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b10e673b-c531-423e-b2c2-a3a1291ef63e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.091863] env[62740]: DEBUG oslo_vmware.api [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Waiting for the task: (returnval){ [ 1380.091863] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5251b142-dd5f-1950-e750-a10e957120d7" [ 1380.091863] env[62740]: _type = "Task" [ 1380.091863] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.100049] env[62740]: DEBUG oslo_vmware.api [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5251b142-dd5f-1950-e750-a10e957120d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.151258] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1380.151479] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Deleting contents of the VM from datastore datastore1 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1380.151662] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Deleting the datastore file [datastore1] d8dac9af-0897-4fbf-8ee6-1fb3955d48c0 {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1380.151919] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3f40c13a-51f3-4e72-b458-f8c8a30c5b1e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.157766] env[62740]: DEBUG oslo_vmware.api [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Waiting for the task: (returnval){ [ 1380.157766] env[62740]: value = "task-640271" [ 1380.157766] env[62740]: _type = "Task" [ 1380.157766] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.165096] env[62740]: DEBUG oslo_vmware.api [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Task: {'id': task-640271, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.602970] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1380.603337] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Creating directory with path [datastore1] vmware_temp/4e84ffc8-8332-4e77-a5b9-26b35a808bd5/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1380.603622] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-349ebfd7-2bf4-4c02-896a-ccbfc703f012 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.614808] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Created directory with path [datastore1] vmware_temp/4e84ffc8-8332-4e77-a5b9-26b35a808bd5/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1380.615101] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Fetch image to [datastore1] vmware_temp/4e84ffc8-8332-4e77-a5b9-26b35a808bd5/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1380.615349] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore1] vmware_temp/4e84ffc8-8332-4e77-a5b9-26b35a808bd5/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore1 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1380.616115] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8efcc130-6fb5-4b52-b19a-804ab00e9dcf {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.622694] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84663df0-6fae-4634-9115-3d98a953c0a6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.631638] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d0b316c-0a0a-4ac3-8967-3ee125112a48 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.666033] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-056e52ed-4a97-46b5-8b14-637039b54686 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.671067] env[62740]: DEBUG oslo_vmware.api [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Task: {'id': task-640271, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.070022} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.672698] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1380.672789] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Deleted contents of the VM from datastore datastore1 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1380.672970] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1380.673191] env[62740]: INFO nova.compute.manager [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Took 0.60 seconds to destroy the instance on the hypervisor. 
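[editor's note] The CopyVirtualDisk_Task and DeleteDatastoreFile_Task records above (and the _poll_task frames in the traceback) follow the standard oslo.vmware pattern: submit a vCenter task, then poll its TaskInfo until the state reaches 'success' or 'error'. Below is a minimal sketch of that loop, not oslo.vmware's actual implementation; poll_task_info and TaskError are illustrative stand-ins, and the real library raises a translated VimFaultException (e.g. the InvalidArgument fault seen above) via exceptions.translate_fault.

    import time

    class TaskError(Exception):
        """Illustrative stand-in for oslo_vmware.exceptions.VimFaultException."""

    def wait_for_task(poll_task_info, task_ref, interval=0.5):
        """Poll a vCenter task until it succeeds or errors.

        poll_task_info is a caller-supplied function returning an object
        shaped like vCenter's TaskInfo (.state, .result, .error) -- an
        assumption for this sketch, not a real oslo.vmware helper.
        """
        while True:
            info = poll_task_info(task_ref)
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                # The real library translates the fault before raising,
                # producing e.g. "Faults: ['InvalidArgument']".
                raise TaskError(info.error)
            # 'queued' / 'running': keep waiting, as in the
            # "progress is 0%" records above.
            time.sleep(interval)

This is why a single failed disk copy surfaces twice in the log: once when the task poller raises, and again when the compute manager wraps the fault into the spawn failure.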
[ 1380.674955] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-aeb4d49f-3107-4ef3-9033-5a8edb06b975 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.678242] env[62740]: DEBUG nova.compute.claims [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1380.678420] env[62740]: DEBUG oslo_concurrency.lockutils [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1380.678682] env[62740]: DEBUG oslo_concurrency.lockutils [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1380.680673] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg fe8b2eb9d08441309d60187c0f438d07 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1380.696386] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore1 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1380.717604] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fe8b2eb9d08441309d60187c0f438d07 [ 1380.751939] env[62740]: DEBUG oslo_vmware.rw_handles [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4e84ffc8-8332-4e77-a5b9-26b35a808bd5/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1380.812988] env[62740]: DEBUG oslo_vmware.rw_handles [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Completed reading data from the image iterator. 
{{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1380.813215] env[62740]: DEBUG oslo_vmware.rw_handles [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4e84ffc8-8332-4e77-a5b9-26b35a808bd5/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1380.992119] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-789fbfd7-6b8d-4a06-87ca-fb33cd27d121 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.999596] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3efb9ce-a8bf-431d-9f1e-f84c16a17c71 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.029158] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b8a1336-33c5-482e-bc81-7dc4b6106be5 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.036296] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b198b317-a4a7-4e1a-874d-301971f19d27 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.049225] env[62740]: DEBUG nova.compute.provider_tree [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1381.049751] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg d23628ed8e1c4852941b6dc43c7e049e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1381.056961] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d23628ed8e1c4852941b6dc43c7e049e [ 1381.057986] env[62740]: DEBUG nova.scheduler.client.report [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1381.060679] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting 
reply to msg 5079665a52b2403c97657504eb2b6be5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1381.072117] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5079665a52b2403c97657504eb2b6be5 [ 1381.072786] env[62740]: DEBUG oslo_concurrency.lockutils [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.394s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1381.073308] env[62740]: ERROR nova.compute.manager [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1381.073308] env[62740]: Faults: ['InvalidArgument'] [ 1381.073308] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Traceback (most recent call last): [ 1381.073308] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1381.073308] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] self.driver.spawn(context, instance, image_meta, [ 1381.073308] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1381.073308] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1381.073308] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1381.073308] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] self._fetch_image_if_missing(context, vi) [ 1381.073308] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1381.073308] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] image_cache(vi, tmp_image_ds_loc) [ 1381.073308] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1381.073654] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] vm_util.copy_virtual_disk( [ 1381.073654] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1381.073654] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] session._wait_for_task(vmdk_copy_task) [ 1381.073654] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1381.073654] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] return self.wait_for_task(task_ref) [ 
1381.073654] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1381.073654] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] return evt.wait() [ 1381.073654] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1381.073654] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] result = hub.switch() [ 1381.073654] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1381.073654] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] return self.greenlet.switch() [ 1381.073654] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1381.073654] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] self.f(*self.args, **self.kw) [ 1381.074061] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1381.074061] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] raise exceptions.translate_fault(task_info.error) [ 1381.074061] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1381.074061] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Faults: ['InvalidArgument'] [ 1381.074061] env[62740]: ERROR nova.compute.manager [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] [ 1381.074061] env[62740]: DEBUG nova.compute.utils [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1381.075439] env[62740]: DEBUG nova.compute.manager [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Build of instance d8dac9af-0897-4fbf-8ee6-1fb3955d48c0 was re-scheduled: A specified parameter was not correct: fileType [ 1381.075439] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1381.075815] env[62740]: DEBUG nova.compute.manager [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1381.075991] env[62740]: DEBUG nova.compute.manager [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 
tempest-ServerDiskConfigTestJSON-1958840202-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1381.076176] env[62740]: DEBUG nova.compute.manager [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1381.076342] env[62740]: DEBUG nova.network.neutron [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1381.436135] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg fdb2e1ead711446d8b0a2b5136c5fd24 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1381.448839] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fdb2e1ead711446d8b0a2b5136c5fd24 [ 1381.449395] env[62740]: DEBUG nova.network.neutron [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1381.449926] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 6627d9c9637e4e7face8407412477a9c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1381.459796] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6627d9c9637e4e7face8407412477a9c [ 1381.460437] env[62740]: INFO nova.compute.manager [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Took 0.38 seconds to deallocate network for instance. 
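[editor's note] The sequence above shows the re-schedule path: the compute manager catches the driver fault, records "Build of instance ... was re-scheduled", attempts to unplug VIFs, then deallocates networking before handing the request back to the scheduler. A minimal sketch of that control flow follows, under the assumption of injected callbacks; spawn, cleanup_networks and reschedule are illustrative names, not Nova's actual API surface.

    def build_instance(instance, spawn, cleanup_networks, reschedule):
        """Sketch of the build/re-schedule flow seen in the records above."""
        try:
            spawn(instance)
        except Exception as exc:
            # On driver failure the claim is aborted and networking torn
            # down before the request is returned to the scheduler --
            # matching the "Unplugging VIFs" / "Deallocating network"
            # records that precede the re-schedule.
            cleanup_networks(instance)
            reschedule(instance, reason=str(exc))

Note that cleanup is best-effort: the log shows the VMware driver does not implement unplug_vifs, so that step is skipped with a debug message rather than treated as an error.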
[ 1381.462543] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg be0ba1dab4c7402c962ab402d52ef08c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1381.499918] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be0ba1dab4c7402c962ab402d52ef08c [ 1381.502566] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg f16e915ac70045438257b3bd3f29a582 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1381.534991] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f16e915ac70045438257b3bd3f29a582 [ 1381.561023] env[62740]: INFO nova.scheduler.client.report [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Deleted allocations for instance d8dac9af-0897-4fbf-8ee6-1fb3955d48c0 [ 1381.566284] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg c1ea068f881341ada79aff59f949deba in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1381.584956] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c1ea068f881341ada79aff59f949deba [ 1381.585614] env[62740]: DEBUG oslo_concurrency.lockutils [None req-220bd9d7-2988-4ca9-87dc-596aab0c6404 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "d8dac9af-0897-4fbf-8ee6-1fb3955d48c0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 544.092s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1381.586215] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg ee2fdde05dd2438e9023a193656bbd1c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1381.586999] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2ccbe059-93c0-460f-8d44-52053d9c16a5 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "d8dac9af-0897-4fbf-8ee6-1fb3955d48c0" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 346.448s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1381.587240] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2ccbe059-93c0-460f-8d44-52053d9c16a5 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquiring lock "d8dac9af-0897-4fbf-8ee6-1fb3955d48c0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1381.587455] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2ccbe059-93c0-460f-8d44-52053d9c16a5 tempest-ServerDiskConfigTestJSON-1958840202
tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "d8dac9af-0897-4fbf-8ee6-1fb3955d48c0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1381.588328] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2ccbe059-93c0-460f-8d44-52053d9c16a5 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "d8dac9af-0897-4fbf-8ee6-1fb3955d48c0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1381.590053] env[62740]: INFO nova.compute.manager [None req-2ccbe059-93c0-460f-8d44-52053d9c16a5 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Terminating instance [ 1381.592162] env[62740]: DEBUG nova.compute.manager [None req-2ccbe059-93c0-460f-8d44-52053d9c16a5 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1381.592363] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-2ccbe059-93c0-460f-8d44-52053d9c16a5 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1381.592851] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7dd5ffb1-1201-4e35-9503-0c229570ffc2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.599721] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ee2fdde05dd2438e9023a193656bbd1c [ 1381.602806] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-684d7eb5-a69f-4666-be33-2ba76f503901 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.613188] env[62740]: DEBUG nova.compute.manager [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Starting instance...
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1381.614822] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg bec2c58fbf4846a79abc5f6f0af4c0ad in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1381.634177] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-2ccbe059-93c0-460f-8d44-52053d9c16a5 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d8dac9af-0897-4fbf-8ee6-1fb3955d48c0 could not be found. [ 1381.634382] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-2ccbe059-93c0-460f-8d44-52053d9c16a5 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1381.634561] env[62740]: INFO nova.compute.manager [None req-2ccbe059-93c0-460f-8d44-52053d9c16a5 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1381.634806] env[62740]: DEBUG oslo.service.loopingcall [None req-2ccbe059-93c0-460f-8d44-52053d9c16a5 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
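The "Waiting for function ... to return" line above is emitted by oslo.service's looping-call helper, which re-invokes a callable on a timer until the callable signals completion. A minimal sketch of that pattern, assuming only the public oslo.service API (the retry count and the no-op deallocation body are illustrative, not Nova's actual values):

    from oslo_service import loopingcall

    attempts = []

    def _deallocate_network_with_retries():
        # Stand-in body: the real function calls deallocate_for_instance()
        # and retries on failure; here we simply "succeed" on the third call.
        attempts.append(1)
        if len(attempts) < 3:
            return  # no signal: the timer fires this function again
        # Raising LoopingCallDone stops the loop; its retvalue becomes
        # the result returned by wait() below.
        raise loopingcall.LoopingCallDone(retvalue='deallocated')

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_network_with_retries)
    print(timer.start(interval=0.1).wait())  # -> 'deallocated'

start() returns an event object, so the caller can block on wait() exactly the way the compute manager does while the deallocation records below are produced.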
[ 1381.635063] env[62740]: DEBUG nova.compute.manager [-] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1381.635139] env[62740]: DEBUG nova.network.neutron [-] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1381.648286] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bec2c58fbf4846a79abc5f6f0af4c0ad [ 1381.658263] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 98c06d5b15424a69bb6a474e799e98bf in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1381.665618] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 98c06d5b15424a69bb6a474e799e98bf [ 1381.665972] env[62740]: DEBUG nova.network.neutron [-] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1381.666367] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 6e23425bd3ea4e6b875ff653a37139b2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1381.668077] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1381.668077] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1381.669528] env[62740]: INFO nova.compute.claims [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1381.671122] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg 00dd4ca8979e444abbec05a347ce1241 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1381.673664] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6e23425bd3ea4e6b875ff653a37139b2 [ 1381.674089] env[62740]: INFO nova.compute.manager [-] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] Took 0.04 seconds to deallocate network for instance.
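The paired "acquired by ... waited" / "released by ... held" DEBUG lines throughout this log come from oslo.concurrency's lock decorator, which identifies the protected callable by its Python qualified name; that is why functions nested inside methods show .<locals>. segments, as in terminate_instance.<locals>.do_terminate_instance above. A minimal sketch, assuming only the public lockutils API; the sleep body is an illustrative stand-in for the teardown work:

    import time
    from oslo_concurrency import lockutils

    def terminate_instance(instance_uuid):
        # The nested function's __qualname__ is
        # "terminate_instance.<locals>.do_terminate_instance", which is the
        # name that appears in the acquired-by/released-by DEBUG lines.
        @lockutils.synchronized(instance_uuid)
        def do_terminate_instance():
            time.sleep(0.01)  # work performed while the per-instance lock is held

        do_terminate_instance()

    terminate_instance('d8dac9af-0897-4fbf-8ee6-1fb3955d48c0')

Because the lock name is the instance UUID, a concurrent build and terminate of the same instance serialize on it, which is exactly what the 346.448s "waited" figure above reflects.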
[ 1381.677292] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2ccbe059-93c0-460f-8d44-52053d9c16a5 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 8febf8b75902434aa04fc611479e4b36 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1381.702088] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8febf8b75902434aa04fc611479e4b36 [ 1381.705156] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 00dd4ca8979e444abbec05a347ce1241 [ 1381.707682] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg c55865b2456745828fe3afed58bb0ae8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1381.713386] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c55865b2456745828fe3afed58bb0ae8 [ 1381.720220] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2ccbe059-93c0-460f-8d44-52053d9c16a5 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 203e36132cf84a0b9786db83fa0bf563 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1381.755249] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 203e36132cf84a0b9786db83fa0bf563 [ 1381.760332] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2ccbe059-93c0-460f-8d44-52053d9c16a5 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "d8dac9af-0897-4fbf-8ee6-1fb3955d48c0" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.173s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1381.760664] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2ccbe059-93c0-460f-8d44-52053d9c16a5 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg b2327f806ec2422a925807d4e4f11a51 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1381.761491] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "d8dac9af-0897-4fbf-8ee6-1fb3955d48c0" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 199.503s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1381.761681] env[62740]: INFO nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: d8dac9af-0897-4fbf-8ee6-1fb3955d48c0] During sync_power_state the instance has a pending task (deleting). Skip.
[ 1381.761879] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "d8dac9af-0897-4fbf-8ee6-1fb3955d48c0" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1381.771612] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b2327f806ec2422a925807d4e4f11a51 [ 1381.886424] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1381.887126] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 09dbc1767df047b6b0dc00ac369d87bf in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1381.906521] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 09dbc1767df047b6b0dc00ac369d87bf [ 1381.916901] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92ab277f-67b6-43f4-aa78-14b7784337a0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.924825] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47308ec3-d614-4e2c-927f-92cd74e0d2fa {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.955367] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3eabd77-b329-4546-ad41-96687cdf50b6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.962199] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd41f64-a659-4921-aa6b-29f92ab147b0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.975819] env[62740]: DEBUG nova.compute.provider_tree [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1381.976314] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg a865a8e5feab4a89b00b1e1919274c1e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1381.985028] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a865a8e5feab4a89b00b1e1919274c1e [ 1381.985028] env[62740]: DEBUG nova.scheduler.client.report [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
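The inventory dict just reported determines how much Placement will allocate from this provider: for each resource class, the schedulable ceiling is (total - reserved) scaled by allocation_ratio, while min_unit/max_unit/step_size bound each individual request. A small self-contained check using the exact numbers from the log:

    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        # Ceiling on the *sum* of allocations for this resource class.
        cap = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        print(rc, cap)  # VCPU 192, MEMORY_MB 196078, DISK_GB 400

So 48 physical cores are overcommitted 4x into 192 schedulable VCPUs, which is why the m1.nano claims below keep succeeding on this node.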
[ 1381.987179] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg 3d8dfda3fed74eba953043db8f48b000 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1382.000972] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3d8dfda3fed74eba953043db8f48b000 [ 1382.001670] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.334s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1382.002142] env[62740]: DEBUG nova.compute.manager [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Start building networks asynchronously for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1382.003709] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg ce5f0cbd31914f11b2ef14a714b8c869 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1382.032034] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ce5f0cbd31914f11b2ef14a714b8c869 [ 1382.033229] env[62740]: DEBUG nova.compute.utils [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1382.034127] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg 9488e14e17234b0bb017049de97631c2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1382.034884] env[62740]: DEBUG nova.compute.manager [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Allocating IP information in the background.
{{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1382.035063] env[62740]: DEBUG nova.network.neutron [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1382.046546] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9488e14e17234b0bb017049de97631c2 [ 1382.047065] env[62740]: DEBUG nova.compute.manager [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Start building block device mappings for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1382.048718] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg b389209bb8364dd4a5b1b3be2d0c108e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1382.078032] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b389209bb8364dd4a5b1b3be2d0c108e [ 1382.080608] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg ae0567bafc2e46faadea87b24c302e1b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1382.112672] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae0567bafc2e46faadea87b24c302e1b [ 1382.113848] env[62740]: DEBUG nova.compute.manager [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Start spawning the instance on the hypervisor. 
{{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1382.140218] env[62740]: DEBUG nova.virt.hardware [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1382.140463] env[62740]: DEBUG nova.virt.hardware [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1382.140626] env[62740]: DEBUG nova.virt.hardware [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1382.140812] env[62740]: DEBUG nova.virt.hardware [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1382.141068] env[62740]: DEBUG nova.virt.hardware [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1382.141138] env[62740]: DEBUG nova.virt.hardware [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1382.141329] env[62740]: DEBUG nova.virt.hardware [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1382.141490] env[62740]: DEBUG nova.virt.hardware [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1382.141659] env[62740]: DEBUG nova.virt.hardware [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1382.141824] env[62740]: DEBUG nova.virt.hardware [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1382.141996] env[62740]: DEBUG nova.virt.hardware [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1382.142859] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ce50936-27a0-4dfd-a7b6-83da9590a651 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.148063] env[62740]: DEBUG nova.policy [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '017aef872af749f3833b65f279808836', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8e3dba04fe444522a3b09a32eeb47140', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 1382.152832] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-292d46ff-7b16-4acf-85f1-e872623e6b78 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.486610] env[62740]: DEBUG nova.network.neutron [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Successfully created port: 9b012013-e0cb-4128-a8d3-6895e7e759e5 {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1383.308346] env[62740]: DEBUG nova.network.neutron [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Successfully updated port: 9b012013-e0cb-4128-a8d3-6895e7e759e5 {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1383.308346] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg 80ef61b5f4dd40978f3f8ca640ccdad4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1383.320457] env[62740]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 80ef61b5f4dd40978f3f8ca640ccdad4 [ 1383.320457] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Acquiring lock "refresh_cache-913ddb91-9d46-459e-8775-c9f380ed3cc4" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1383.320457] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Acquired lock "refresh_cache-913ddb91-9d46-459e-8775-c9f380ed3cc4" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1383.320457] env[62740]: DEBUG nova.network.neutron [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1383.320457] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg 791a612af16444c081e949debc50e034 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1383.331137] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 791a612af16444c081e949debc50e034 [ 1383.389170] env[62740]: DEBUG nova.network.neutron [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1383.485510] env[62740]: DEBUG nova.compute.manager [req-c3439dbd-6771-4d8a-9a45-6ca69ccac4d8 req-909b3819-1348-4ed3-a2dc-2acc4d0bdaee service nova] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Received event network-vif-plugged-9b012013-e0cb-4128-a8d3-6895e7e759e5 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1383.485510] env[62740]: DEBUG oslo_concurrency.lockutils [req-c3439dbd-6771-4d8a-9a45-6ca69ccac4d8 req-909b3819-1348-4ed3-a2dc-2acc4d0bdaee service nova] Acquiring lock "913ddb91-9d46-459e-8775-c9f380ed3cc4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1383.486151] env[62740]: DEBUG oslo_concurrency.lockutils [req-c3439dbd-6771-4d8a-9a45-6ca69ccac4d8 req-909b3819-1348-4ed3-a2dc-2acc4d0bdaee service nova] Lock "913ddb91-9d46-459e-8775-c9f380ed3cc4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1383.486303] env[62740]: DEBUG oslo_concurrency.lockutils [req-c3439dbd-6771-4d8a-9a45-6ca69ccac4d8 req-909b3819-1348-4ed3-a2dc-2acc4d0bdaee service nova] Lock "913ddb91-9d46-459e-8775-c9f380ed3cc4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1383.486481] env[62740]: DEBUG nova.compute.manager [req-c3439dbd-6771-4d8a-9a45-6ca69ccac4d8 req-909b3819-1348-4ed3-a2dc-2acc4d0bdaee service nova] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] No waiting events found dispatching network-vif-plugged-9b012013-e0cb-4128-a8d3-6895e7e759e5 {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1383.486651] env[62740]: WARNING nova.compute.manager [req-c3439dbd-6771-4d8a-9a45-6ca69ccac4d8 req-909b3819-1348-4ed3-a2dc-2acc4d0bdaee service nova] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Received unexpected event network-vif-plugged-9b012013-e0cb-4128-a8d3-6895e7e759e5 for instance with vm_state building and task_state spawning. [ 1383.486813] env[62740]: DEBUG nova.compute.manager [req-c3439dbd-6771-4d8a-9a45-6ca69ccac4d8 req-909b3819-1348-4ed3-a2dc-2acc4d0bdaee service nova] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Received event network-changed-9b012013-e0cb-4128-a8d3-6895e7e759e5 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1383.486971] env[62740]: DEBUG nova.compute.manager [req-c3439dbd-6771-4d8a-9a45-6ca69ccac4d8 req-909b3819-1348-4ed3-a2dc-2acc4d0bdaee service nova] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Refreshing instance network info cache due to event network-changed-9b012013-e0cb-4128-a8d3-6895e7e759e5. {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}}
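The event records above show the two halves of the external-event handshake: a waiter may register interest in network-vif-plugged before spawning, and the Neutron-triggered event either wakes that waiter or, as here, is dispatched with "No waiting events found" and logged as unexpected. A hypothetical, much-reduced sketch of that pop-and-dispatch pattern (class and method names are illustrative, not Nova's internal API):

    import threading

    class InstanceEvents:
        """Toy per-instance event registry mirroring the pattern in the log."""
        def __init__(self):
            self._lock = threading.Lock()  # plays the role of the "-events" lock
            self._waiting = {}             # (instance_uuid, event_name) -> Event

        def prepare_event(self, instance_uuid, event_name):
            with self._lock:
                ev = threading.Event()
                self._waiting[(instance_uuid, event_name)] = ev
                return ev

        def pop_event(self, instance_uuid, event_name):
            with self._lock:
                return self._waiting.pop((instance_uuid, event_name), None)

    def external_instance_event(registry, instance_uuid, event_name):
        ev = registry.pop_event(instance_uuid, event_name)
        if ev is None:
            # Corresponds to "No waiting events found dispatching ..." plus
            # the WARNING about an unexpected event.
            print(f'unexpected {event_name} for {instance_uuid}')
        else:
            ev.set()  # wakes whoever is blocked waiting for the plug event

    registry = InstanceEvents()
    external_instance_event(registry, '913ddb91', 'network-vif-plugged')

The event being "unexpected" is harmless here: the instance is still building, so the compute manager simply refreshes the network info cache instead.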
[ 1383.487154] env[62740]: DEBUG oslo_concurrency.lockutils [req-c3439dbd-6771-4d8a-9a45-6ca69ccac4d8 req-909b3819-1348-4ed3-a2dc-2acc4d0bdaee service nova] Acquiring lock "refresh_cache-913ddb91-9d46-459e-8775-c9f380ed3cc4" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1383.588597] env[62740]: DEBUG nova.network.neutron [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Updating instance_info_cache with network_info: [{"id": "9b012013-e0cb-4128-a8d3-6895e7e759e5", "address": "fa:16:3e:bb:f8:35", "network": {"id": "304597cd-4bd9-403c-8f5b-990e2a1efabc", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.123", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "47f0062f3bf04910bbbb3502a2f3ff28", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b012013-e0", "ovs_interfaceid": "9b012013-e0cb-4128-a8d3-6895e7e759e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1383.589145] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg c85e1ba3d7644bb6ab5eacedfa3415d5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1383.601441] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c85e1ba3d7644bb6ab5eacedfa3415d5 [ 1383.601982] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Releasing lock "refresh_cache-913ddb91-9d46-459e-8775-c9f380ed3cc4" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1383.602276] env[62740]: DEBUG nova.compute.manager [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Instance network_info: |[{"id": "9b012013-e0cb-4128-a8d3-6895e7e759e5", "address": "fa:16:3e:bb:f8:35", "network": {"id": "304597cd-4bd9-403c-8f5b-990e2a1efabc", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.123", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta":
{"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "47f0062f3bf04910bbbb3502a2f3ff28", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b012013-e0", "ovs_interfaceid": "9b012013-e0cb-4128-a8d3-6895e7e759e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1383.602557] env[62740]: DEBUG oslo_concurrency.lockutils [req-c3439dbd-6771-4d8a-9a45-6ca69ccac4d8 req-909b3819-1348-4ed3-a2dc-2acc4d0bdaee service nova] Acquired lock "refresh_cache-913ddb91-9d46-459e-8775-c9f380ed3cc4" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1383.602735] env[62740]: DEBUG nova.network.neutron [req-c3439dbd-6771-4d8a-9a45-6ca69ccac4d8 req-909b3819-1348-4ed3-a2dc-2acc4d0bdaee service nova] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Refreshing network info cache for port 9b012013-e0cb-4128-a8d3-6895e7e759e5 {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1383.603131] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-c3439dbd-6771-4d8a-9a45-6ca69ccac4d8 req-909b3819-1348-4ed3-a2dc-2acc4d0bdaee service nova] Expecting reply to msg 5bb51739ea3d4a1998abb37553e0b05c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1383.603920] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bb:f8:35', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '39ab9baf-90cd-4fe2-8d56-434f8210fc19', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9b012013-e0cb-4128-a8d3-6895e7e759e5', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1383.611884] env[62740]: DEBUG oslo.service.loopingcall [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1383.612478] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5bb51739ea3d4a1998abb37553e0b05c [ 1383.612812] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1383.615355] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f7a85a06-0aa4-41c1-bacc-1383ca2620f6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.634790] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1383.634790] env[62740]: value = "task-640272" [ 1383.634790] env[62740]: _type = "Task" [ 1383.634790] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.642187] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640272, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.915058] env[62740]: DEBUG nova.network.neutron [req-c3439dbd-6771-4d8a-9a45-6ca69ccac4d8 req-909b3819-1348-4ed3-a2dc-2acc4d0bdaee service nova] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Updated VIF entry in instance network info cache for port 9b012013-e0cb-4128-a8d3-6895e7e759e5. {{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1383.915433] env[62740]: DEBUG nova.network.neutron [req-c3439dbd-6771-4d8a-9a45-6ca69ccac4d8 req-909b3819-1348-4ed3-a2dc-2acc4d0bdaee service nova] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Updating instance_info_cache with network_info: [{"id": "9b012013-e0cb-4128-a8d3-6895e7e759e5", "address": "fa:16:3e:bb:f8:35", "network": {"id": "304597cd-4bd9-403c-8f5b-990e2a1efabc", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.123", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "47f0062f3bf04910bbbb3502a2f3ff28", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b012013-e0", "ovs_interfaceid": "9b012013-e0cb-4128-a8d3-6895e7e759e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1383.915994] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-c3439dbd-6771-4d8a-9a45-6ca69ccac4d8 req-909b3819-1348-4ed3-a2dc-2acc4d0bdaee service nova] Expecting reply to msg d074d45d86a54060b3603b5c5afbb85e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1383.925075] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d074d45d86a54060b3603b5c5afbb85e [ 1383.925768] env[62740]: 
DEBUG oslo_concurrency.lockutils [req-c3439dbd-6771-4d8a-9a45-6ca69ccac4d8 req-909b3819-1348-4ed3-a2dc-2acc4d0bdaee service nova] Releasing lock "refresh_cache-913ddb91-9d46-459e-8775-c9f380ed3cc4" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1384.145231] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640272, 'name': CreateVM_Task, 'duration_secs': 0.283323} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.145347] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1384.145959] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1384.146532] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1384.146532] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1384.147307] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42cc11b5-23ed-469d-a2d4-83bcc1250437 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.151107] env[62740]: DEBUG oslo_vmware.api [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Waiting for the task: (returnval){ [ 1384.151107] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5292f61d-c1a3-8545-91bd-72bd79c4651c" [ 1384.151107] env[62740]: _type = "Task" [ 1384.151107] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.159564] env[62740]: DEBUG oslo_vmware.api [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5292f61d-c1a3-8545-91bd-72bd79c4651c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
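The recurring "Waiting for the task ... / Task: {...} progress is N%" pairs come from a poll loop around the vCenter task object: the task info is re-fetched until it reaches a terminal state, and an error state is translated into an exception. A hypothetical reduction of that loop (wait_for_task and TaskInfo here are stand-ins, not oslo.vmware's actual implementation):

    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        state: str            # 'running' | 'success' | 'error'
        progress: int = 0
        result: object = None
        error: str = ''

    def wait_for_task(poll, interval=0.5):
        # `poll` stands in for the SOAP call that re-reads task.info.
        while True:
            info = poll()
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                # The library maps the fault name to an exception class here;
                # "Fault InvalidArgument not matched" later in this log means
                # it fell back to the generic VimFaultException.
                raise RuntimeError(info.error)
            print(f'progress is {info.progress}%')  # the DEBUG line above
            time.sleep(interval)

    states = iter([TaskInfo('running', 0), TaskInfo('running', 50),
                   TaskInfo('success', 100, result='done')])
    print(wait_for_task(lambda: next(states), interval=0))  # -> 'done'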
[ 1384.661495] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1384.661807] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1384.661928] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1388.633912] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-539ed5f6-ec8c-42c4-82a0-f435b92bfbf4 tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg 7a6136e02766473cbc122c466b0b259c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1388.644221] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7a6136e02766473cbc122c466b0b259c [ 1388.644721] env[62740]: DEBUG oslo_concurrency.lockutils [None req-539ed5f6-ec8c-42c4-82a0-f435b92bfbf4 tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Acquiring lock "3f36f081-2851-4339-860d-0a302ef4ee2c" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1411.756030] env[62740]: WARNING oslo_vmware.rw_handles [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1411.756030] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1411.756030] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1411.756030] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1411.756030] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1411.756030] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 1411.756030] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1411.756030] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1411.756030] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1411.756030] env[62740]: ERROR oslo_vmware.rw_handles raise
RemoteDisconnected("Remote end closed connection without" [ 1411.756030] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1411.756030] env[62740]: ERROR oslo_vmware.rw_handles [ 1411.756030] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/7f173b93-61e1-4fc1-a900-2a60f4318fb0/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1411.758320] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1411.758657] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Copying Virtual Disk [datastore2] vmware_temp/7f173b93-61e1-4fc1-a900-2a60f4318fb0/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore2] vmware_temp/7f173b93-61e1-4fc1-a900-2a60f4318fb0/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1411.759029] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-79bb6f39-007c-41bc-b99e-e10e75e42f07 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.767786] env[62740]: DEBUG oslo_vmware.api [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Waiting for the task: (returnval){ [ 1411.767786] env[62740]: value = "task-640273" [ 1411.767786] env[62740]: _type = "Task" [ 1411.767786] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.776147] env[62740]: DEBUG oslo_vmware.api [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Task: {'id': task-640273, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.278607] env[62740]: DEBUG oslo_vmware.exceptions [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Fault InvalidArgument not matched. 
{{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1412.278954] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1412.279625] env[62740]: ERROR nova.compute.manager [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1412.279625] env[62740]: Faults: ['InvalidArgument'] [ 1412.279625] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Traceback (most recent call last): [ 1412.279625] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1412.279625] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] yield resources [ 1412.279625] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1412.279625] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] self.driver.spawn(context, instance, image_meta, [ 1412.279625] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1412.279625] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1412.279625] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1412.279625] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] self._fetch_image_if_missing(context, vi) [ 1412.279625] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1412.280037] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] image_cache(vi, tmp_image_ds_loc) [ 1412.280037] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1412.280037] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] vm_util.copy_virtual_disk( [ 1412.280037] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1412.280037] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] session._wait_for_task(vmdk_copy_task) [ 1412.280037] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1412.280037] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] return self.wait_for_task(task_ref) [ 1412.280037] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1412.280037] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] return evt.wait() [ 1412.280037] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1412.280037] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] result = hub.switch() [ 1412.280037] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1412.280037] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] return self.greenlet.switch() [ 1412.280432] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1412.280432] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] self.f(*self.args, **self.kw) [ 1412.280432] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1412.280432] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] raise exceptions.translate_fault(task_info.error) [ 1412.280432] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1412.280432] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Faults: ['InvalidArgument'] [ 1412.280432] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] [ 1412.280432] env[62740]: INFO nova.compute.manager [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Terminating instance [ 1412.281922] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1412.281922] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1412.283237] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e1fdc3f5-130f-4152-9fdc-698014eefa82 {{(pid=62740) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.284291] env[62740]: DEBUG nova.compute.manager [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1412.284612] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1412.285245] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-559f17e9-511d-49ed-94c1-f488bf2726d4 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.292322] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1412.293306] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-697da457-5b0a-43e2-9f42-b5ee8238b9d5 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.294651] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1412.294829] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1412.295496] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d5a9f8c-baf9-4cf0-b05d-1d6994fb6c1b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.300754] env[62740]: DEBUG oslo_vmware.api [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Waiting for the task: (returnval){ [ 1412.300754] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]529da262-b709-6e8f-4b83-e02b2615d3c9" [ 1412.300754] env[62740]: _type = "Task" [ 1412.300754] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.307708] env[62740]: DEBUG oslo_vmware.api [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]529da262-b709-6e8f-4b83-e02b2615d3c9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.374519] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1412.374685] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1412.374854] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Deleting the datastore file [datastore2] eba85edb-4d86-42c9-8b49-98f2173a3eeb {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1412.375139] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-07d118aa-f786-4b8d-887c-45e698587bf8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.380998] env[62740]: DEBUG oslo_vmware.api [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Waiting for the task: (returnval){ [ 1412.380998] env[62740]: value = "task-640275" [ 1412.380998] env[62740]: _type = "Task" [ 1412.380998] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.388187] env[62740]: DEBUG oslo_vmware.api [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Task: {'id': task-640275, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.811395] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1412.811700] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Creating directory with path [datastore2] vmware_temp/9bc7c4c5-ca5e-4f81-aeaf-26770bda8b4f/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1412.811883] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9cc76678-209e-4eb7-a863-c5b4473150be {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.822940] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Created directory with path [datastore2] vmware_temp/9bc7c4c5-ca5e-4f81-aeaf-26770bda8b4f/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1412.823153] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Fetch image to [datastore2] vmware_temp/9bc7c4c5-ca5e-4f81-aeaf-26770bda8b4f/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1412.823334] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/9bc7c4c5-ca5e-4f81-aeaf-26770bda8b4f/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1412.824056] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c37c6e7c-9743-4511-ace6-16657b543726 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.830271] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-839c2fc3-343e-4211-897b-37c3c26003f1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.840069] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b990cc5-a78d-4310-89f0-6b475b58dd10 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.869658] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1107b2a4-9df8-4457-9239-198783b76612 {{(pid=62740) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.874976] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8d59b885-f89c-4c7b-af97-131f1aae2ab7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.888951] env[62740]: DEBUG oslo_vmware.api [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Task: {'id': task-640275, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.069854} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.889173] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1412.889362] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1412.889537] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1412.889741] env[62740]: INFO nova.compute.manager [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Took 0.61 seconds to destroy the instance on the hypervisor. 
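The DeleteDatastoreFile_Task records above show oslo_vmware's task handling in miniature: the driver invokes a vCenter method that returns an asynchronous Task object, then wait_for_task()/_poll_task() re-read the task state on a timer, logging "progress is N%" along the way and the duration_secs once it completes. A minimal sketch of that polling loop, assuming a hypothetical get_task_info() callable in place of the real PropertyCollector plumbing (the actual logic lives in oslo_vmware.api.VMwareAPISession.wait_for_task):

    import time

    def wait_for_task(task_ref, get_task_info, interval=0.5):
        # Poll a vCenter task until it reaches a terminal state, mirroring
        # the "progress is N%" / "completed successfully" lines in the log.
        start = time.monotonic()
        while True:
            info = get_task_info(task_ref)  # hypothetical: reads Task.info
            if info['state'] == 'success':
                print('Task %s completed successfully in %.6fs'
                      % (task_ref, time.monotonic() - start))
                return info.get('result')
            if info['state'] == 'error':
                # oslo_vmware translates the VIM fault before raising
                raise RuntimeError(info['error'])
            print('Task %s progress is %s%%'
                  % (task_ref, info.get('progress') or 0))
            time.sleep(interval)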
[ 1412.891858] env[62740]: DEBUG nova.compute.claims [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1412.892046] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1412.892262] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1412.894143] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Expecting reply to msg 0931a7693b924c5bb4c59120cefd0055 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1412.897721] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1412.933267] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0931a7693b924c5bb4c59120cefd0055 [ 1412.954398] env[62740]: DEBUG oslo_vmware.rw_handles [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9bc7c4c5-ca5e-4f81-aeaf-26770bda8b4f/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1413.019691] env[62740]: DEBUG oslo_vmware.rw_handles [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Completed reading data from the image iterator. {{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1413.019900] env[62740]: DEBUG oslo_vmware.rw_handles [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9bc7c4c5-ca5e-4f81-aeaf-26770bda8b4f/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1413.183423] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f13db3b-3c65-42a6-a496-2304e5dd639f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.191057] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-678f530c-9216-4f6f-964e-c5fcedbc1d0b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.220391] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-698001ca-114b-4698-87bb-c4e99c1274df {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.227087] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b8a551d-a7aa-4ffd-862f-66a781833b92 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.239610] env[62740]: DEBUG nova.compute.provider_tree [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1413.240101] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Expecting reply to msg 80343abe56c64eb2aa35e3d8f88a7038 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1413.248184] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 80343abe56c64eb2aa35e3d8f88a7038 [ 1413.249099] env[62740]: DEBUG nova.scheduler.client.report [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1413.251421] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Expecting reply to msg cfdfa70ad4344ea78f3bb76839b359dc in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1413.262137] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cfdfa70ad4344ea78f3bb76839b359dc [ 1413.262767] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: 
held 0.370s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1413.263337] env[62740]: ERROR nova.compute.manager [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1413.263337] env[62740]: Faults: ['InvalidArgument'] [ 1413.263337] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Traceback (most recent call last): [ 1413.263337] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1413.263337] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] self.driver.spawn(context, instance, image_meta, [ 1413.263337] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1413.263337] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1413.263337] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1413.263337] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] self._fetch_image_if_missing(context, vi) [ 1413.263337] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1413.263337] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] image_cache(vi, tmp_image_ds_loc) [ 1413.263337] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1413.263731] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] vm_util.copy_virtual_disk( [ 1413.263731] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1413.263731] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] session._wait_for_task(vmdk_copy_task) [ 1413.263731] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1413.263731] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] return self.wait_for_task(task_ref) [ 1413.263731] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1413.263731] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] return evt.wait() [ 1413.263731] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1413.263731] env[62740]: ERROR 
nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] result = hub.switch() [ 1413.263731] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1413.263731] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] return self.greenlet.switch() [ 1413.263731] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1413.263731] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] self.f(*self.args, **self.kw) [ 1413.264213] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1413.264213] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] raise exceptions.translate_fault(task_info.error) [ 1413.264213] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1413.264213] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Faults: ['InvalidArgument'] [ 1413.264213] env[62740]: ERROR nova.compute.manager [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] [ 1413.264213] env[62740]: DEBUG nova.compute.utils [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1413.265449] env[62740]: DEBUG nova.compute.manager [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Build of instance eba85edb-4d86-42c9-8b49-98f2173a3eeb was re-scheduled: A specified parameter was not correct: fileType [ 1413.265449] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1413.265823] env[62740]: DEBUG nova.compute.manager [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1413.265995] env[62740]: DEBUG nova.compute.manager [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1413.266182] env[62740]: DEBUG nova.compute.manager [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1413.266419] env[62740]: DEBUG nova.network.neutron [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1413.613975] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Expecting reply to msg bbb2ed4f593e4593add1c9e0f4109cf8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1413.623603] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bbb2ed4f593e4593add1c9e0f4109cf8 [ 1413.624215] env[62740]: DEBUG nova.network.neutron [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1413.624701] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Expecting reply to msg 7ae609f87b0e400bac249b5051d4c52e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1413.634689] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ae609f87b0e400bac249b5051d4c52e [ 1413.635303] env[62740]: INFO nova.compute.manager [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Took 0.37 seconds to deallocate network for instance. 
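The records from the failed spawn onward trace Nova's reschedule path end to end: the VimFaultException raised while caching the image aborts the resource claim under the "compute_resources" lock, VIF unplugging is skipped because the vmwareapi driver does not implement unplug_vifs, and the instance's network allocations are torn down before the request goes back to the scheduler. A condensed sketch of that control flow, with illustrative names rather than Nova's actual signatures:

    class RescheduledException(Exception):
        """Signals the conductor to pick another host (illustrative)."""

    def build_and_run(driver, claim, network_api, instance):
        # Condensed from the flow visible above; not Nova's real API.
        try:
            driver.spawn(instance)
        except Exception as exc:  # here: VimFaultException, Faults=['InvalidArgument']
            claim.abort()  # releases CPU/RAM/disk held under "compute_resources"
            network_api.deallocate_for_instance(instance)  # empties the info cache
            raise RescheduledException(
                'Build of instance %s was re-scheduled: %s' % (instance, exc))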
[ 1413.636985] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Expecting reply to msg 8855fb6164534044a4595b5880295acf in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1413.672356] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8855fb6164534044a4595b5880295acf [ 1413.674326] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Expecting reply to msg f329f7081e2e464daa5e469aace5f3f3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1413.708620] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f329f7081e2e464daa5e469aace5f3f3 [ 1413.734138] env[62740]: INFO nova.scheduler.client.report [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Deleted allocations for instance eba85edb-4d86-42c9-8b49-98f2173a3eeb [ 1413.741639] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Expecting reply to msg daf3baf579a044e39a99bfc3392c55b3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1413.756345] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg daf3baf579a044e39a99bfc3392c55b3 [ 1413.756958] env[62740]: DEBUG oslo_concurrency.lockutils [None req-f12f33f7-44b8-44bc-be99-0066fc4c1411 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Lock "eba85edb-4d86-42c9-8b49-98f2173a3eeb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 366.894s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1413.757511] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg ade78761af2245d0875a82e1560567d2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1413.758272] env[62740]: DEBUG oslo_concurrency.lockutils [None req-93ff8229-f9ea-4aea-bc82-c537f1732c76 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Lock "eba85edb-4d86-42c9-8b49-98f2173a3eeb" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 170.849s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1413.758520] env[62740]: DEBUG oslo_concurrency.lockutils [None req-93ff8229-f9ea-4aea-bc82-c537f1732c76 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Acquiring lock "eba85edb-4d86-42c9-8b49-98f2173a3eeb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1413.758738] env[62740]: DEBUG oslo_concurrency.lockutils [None req-93ff8229-f9ea-4aea-bc82-c537f1732c76 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Lock
"eba85edb-4d86-42c9-8b49-98f2173a3eeb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1413.758910] env[62740]: DEBUG oslo_concurrency.lockutils [None req-93ff8229-f9ea-4aea-bc82-c537f1732c76 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Lock "eba85edb-4d86-42c9-8b49-98f2173a3eeb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1413.761448] env[62740]: INFO nova.compute.manager [None req-93ff8229-f9ea-4aea-bc82-c537f1732c76 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Terminating instance [ 1413.762940] env[62740]: DEBUG nova.compute.manager [None req-93ff8229-f9ea-4aea-bc82-c537f1732c76 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1413.763149] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-93ff8229-f9ea-4aea-bc82-c537f1732c76 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1413.763788] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d7b89d33-b63f-4a64-8c35-13617bf82fa8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.774929] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e94b7b1-30e8-4ee8-9291-74fe14d7028b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.785624] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ade78761af2245d0875a82e1560567d2 [ 1413.786127] env[62740]: DEBUG nova.compute.manager [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1413.787793] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg f8b1ba52f6f345cabbac69a35711cbf5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1413.806395] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-93ff8229-f9ea-4aea-bc82-c537f1732c76 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance eba85edb-4d86-42c9-8b49-98f2173a3eeb could not be found.
[ 1413.806515] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-93ff8229-f9ea-4aea-bc82-c537f1732c76 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1413.806653] env[62740]: INFO nova.compute.manager [None req-93ff8229-f9ea-4aea-bc82-c537f1732c76 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1413.806896] env[62740]: DEBUG oslo.service.loopingcall [None req-93ff8229-f9ea-4aea-bc82-c537f1732c76 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1413.807129] env[62740]: DEBUG nova.compute.manager [-] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1413.807226] env[62740]: DEBUG nova.network.neutron [-] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1413.820434] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f8b1ba52f6f345cabbac69a35711cbf5 [ 1413.837580] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 9e4703c58a5445fc964f4b205a7bf834 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1413.841983] env[62740]: DEBUG oslo_concurrency.lockutils [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1413.842369] env[62740]: DEBUG oslo_concurrency.lockutils [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1413.844991] env[62740]: INFO nova.compute.claims [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1413.846727] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 2bae7ad767ca41039f6b4fa36f51dd34 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1413.848753] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9e4703c58a5445fc964f4b205a7bf834 [ 1413.848942] env[62740]: DEBUG nova.network.neutron [-] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Updating instance_info_cache with network_info: []
{{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1413.849536] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 21b671c2f1814751b55074432b4f30f2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1413.856438] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 21b671c2f1814751b55074432b4f30f2 [ 1413.856870] env[62740]: INFO nova.compute.manager [-] [instance: eba85edb-4d86-42c9-8b49-98f2173a3eeb] Took 0.05 seconds to deallocate network for instance. [ 1413.860514] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-93ff8229-f9ea-4aea-bc82-c537f1732c76 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Expecting reply to msg de21d1b33e8042b8a84a11fe46a022a5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1413.893786] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2bae7ad767ca41039f6b4fa36f51dd34 [ 1413.896202] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 7898304affca48b09ae9e1a09c059b36 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1413.901121] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg de21d1b33e8042b8a84a11fe46a022a5 [ 1413.904266] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7898304affca48b09ae9e1a09c059b36 [ 1413.914681] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-93ff8229-f9ea-4aea-bc82-c537f1732c76 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Expecting reply to msg 47b69b64b71648b29d2545b2bdc5ca04 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1413.955667] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 47b69b64b71648b29d2545b2bdc5ca04 [ 1413.960993] env[62740]: DEBUG oslo_concurrency.lockutils [None req-93ff8229-f9ea-4aea-bc82-c537f1732c76 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Lock "eba85edb-4d86-42c9-8b49-98f2173a3eeb" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.203s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1413.961357] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-93ff8229-f9ea-4aea-bc82-c537f1732c76 tempest-ServerPasswordTestJSON-2140213848 tempest-ServerPasswordTestJSON-2140213848-project-member] Expecting reply to msg 06f2c41e400142ac8f0b86a31d282fe6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1413.973440] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 06f2c41e400142ac8f0b86a31d282fe6 [ 1414.116107] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc687f4b-4b21-4ab9-8cc2-fd8a9fbc0479 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.123971] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b07d522-039a-4725-8e66-768520d60dcd {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.154895] env[62740]: DEBUG oslo_vmware.service
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a89cf28f-46fa-4b6e-a45e-680cb5aa0c62 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.162395] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12a8cb64-bb8f-456d-b516-02509a1e1132 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.176567] env[62740]: DEBUG nova.compute.provider_tree [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1414.177337] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 3270c8822e4648e4be4d22ee9bd4f7b3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1414.187085] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3270c8822e4648e4be4d22ee9bd4f7b3 [ 1414.188026] env[62740]: DEBUG nova.scheduler.client.report [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1414.190527] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg cce2e0803f1141ab8ab194fb54843efe in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1414.206377] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cce2e0803f1141ab8ab194fb54843efe [ 1414.207134] env[62740]: DEBUG oslo_concurrency.lockutils [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.365s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1414.207616] env[62740]: DEBUG nova.compute.manager [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Start building networks asynchronously for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1414.209279] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 133eb0ccf5f34a149f76fe42f12dee2a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1414.239984] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 133eb0ccf5f34a149f76fe42f12dee2a [ 1414.241671] env[62740]: DEBUG nova.compute.utils [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1414.242275] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg b79307a97bc94637950cf6ba50bf7f6b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1414.243275] env[62740]: DEBUG nova.compute.manager [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1414.243456] env[62740]: DEBUG nova.network.neutron [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1414.251990] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b79307a97bc94637950cf6ba50bf7f6b [ 1414.252483] env[62740]: DEBUG nova.compute.manager [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1414.254161] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 26539116283341278e84d517cdb03474 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1414.284103] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 26539116283341278e84d517cdb03474 [ 1414.286930] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 15fd70fc9ff44562b6dde917b8161000 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1414.302716] env[62740]: DEBUG nova.policy [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '070a90ba779f4bc59053f8bffc95de94', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1f735ac36a0d46269560f1209706fb69', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 1414.317567] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 15fd70fc9ff44562b6dde917b8161000 [ 1414.318583] env[62740]: DEBUG nova.compute.manager [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Start spawning the instance on the hypervisor. 
{{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1414.343368] env[62740]: DEBUG nova.virt.hardware [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1414.343597] env[62740]: DEBUG nova.virt.hardware [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1414.343754] env[62740]: DEBUG nova.virt.hardware [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1414.343939] env[62740]: DEBUG nova.virt.hardware [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1414.344099] env[62740]: DEBUG nova.virt.hardware [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1414.344249] env[62740]: DEBUG nova.virt.hardware [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1414.344455] env[62740]: DEBUG nova.virt.hardware [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1414.344613] env[62740]: DEBUG nova.virt.hardware [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1414.344779] env[62740]: DEBUG nova.virt.hardware [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Got 1 
possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1414.344941] env[62740]: DEBUG nova.virt.hardware [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1414.345131] env[62740]: DEBUG nova.virt.hardware [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1414.345962] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e6b6bdf-ef60-48f7-becd-f169818bda57 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.353730] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6489d85-44af-434a-8d42-194428f39a2f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.689214] env[62740]: DEBUG nova.network.neutron [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Successfully created port: 07253d85-af48-4ef5-b335-d0ae94097019 {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1414.899407] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-b10f5b87-ac6d-48a3-a6a0-64add319666c tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg 9b44eac75fbf4a3a92ccc857edb4998d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1414.912214] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9b44eac75fbf4a3a92ccc857edb4998d [ 1414.912780] env[62740]: DEBUG oslo_concurrency.lockutils [None req-b10f5b87-ac6d-48a3-a6a0-64add319666c tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Acquiring lock "913ddb91-9d46-459e-8775-c9f380ed3cc4" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1415.921774] env[62740]: DEBUG nova.compute.manager [req-3ac0b9c0-f274-482e-ab54-def89b625a42 req-64a3c844-463e-4fd6-ae54-aece298d296b service nova] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Received event network-vif-plugged-07253d85-af48-4ef5-b335-d0ae94097019 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1415.922034] env[62740]: DEBUG oslo_concurrency.lockutils [req-3ac0b9c0-f274-482e-ab54-def89b625a42 req-64a3c844-463e-4fd6-ae54-aece298d296b service nova] Acquiring lock "07efd13e-40d0-4158-b17c-6f5c75474ce3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1415.922229] env[62740]: DEBUG oslo_concurrency.lockutils [req-3ac0b9c0-f274-482e-ab54-def89b625a42 req-64a3c844-463e-4fd6-ae54-aece298d296b service nova] Lock
"07efd13e-40d0-4158-b17c-6f5c75474ce3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1415.922397] env[62740]: DEBUG oslo_concurrency.lockutils [req-3ac0b9c0-f274-482e-ab54-def89b625a42 req-64a3c844-463e-4fd6-ae54-aece298d296b service nova] Lock "07efd13e-40d0-4158-b17c-6f5c75474ce3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1415.922597] env[62740]: DEBUG nova.compute.manager [req-3ac0b9c0-f274-482e-ab54-def89b625a42 req-64a3c844-463e-4fd6-ae54-aece298d296b service nova] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] No waiting events found dispatching network-vif-plugged-07253d85-af48-4ef5-b335-d0ae94097019 {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1415.922807] env[62740]: WARNING nova.compute.manager [req-3ac0b9c0-f274-482e-ab54-def89b625a42 req-64a3c844-463e-4fd6-ae54-aece298d296b service nova] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Received unexpected event network-vif-plugged-07253d85-af48-4ef5-b335-d0ae94097019 for instance with vm_state building and task_state spawning. [ 1416.128436] env[62740]: DEBUG nova.network.neutron [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Successfully updated port: 07253d85-af48-4ef5-b335-d0ae94097019 {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1416.128436] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 87b3e9f5f66144d0908b8d333c3d3322 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1416.144727] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 87b3e9f5f66144d0908b8d333c3d3322 [ 1416.145481] env[62740]: DEBUG oslo_concurrency.lockutils [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Acquiring lock "refresh_cache-07efd13e-40d0-4158-b17c-6f5c75474ce3" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1416.145614] env[62740]: DEBUG oslo_concurrency.lockutils [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Acquired lock "refresh_cache-07efd13e-40d0-4158-b17c-6f5c75474ce3" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1416.145761] env[62740]: DEBUG nova.network.neutron [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1416.146182] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg b4c9c9b53df641e4a9b3d9e4bf057967 in queue
reply_30cb6e3d754a4ebf9cedab7950709402 [ 1416.156320] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4c9c9b53df641e4a9b3d9e4bf057967 [ 1416.233242] env[62740]: DEBUG nova.network.neutron [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1416.572682] env[62740]: DEBUG nova.network.neutron [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Updating instance_info_cache with network_info: [{"id": "07253d85-af48-4ef5-b335-d0ae94097019", "address": "fa:16:3e:5d:27:3c", "network": {"id": "f10f5770-f866-413b-86ce-20c3a1473482", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1756365504-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f735ac36a0d46269560f1209706fb69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07253d85-af", "ovs_interfaceid": "07253d85-af48-4ef5-b335-d0ae94097019", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1416.573216] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg ac1bb85d9c1e44fa9eb3557926b21029 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1416.586841] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac1bb85d9c1e44fa9eb3557926b21029 [ 1416.587477] env[62740]: DEBUG oslo_concurrency.lockutils [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Releasing lock "refresh_cache-07efd13e-40d0-4158-b17c-6f5c75474ce3" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1416.587762] env[62740]: DEBUG nova.compute.manager [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Instance network_info: |[{"id": "07253d85-af48-4ef5-b335-d0ae94097019", "address": "fa:16:3e:5d:27:3c", "network": {"id": "f10f5770-f866-413b-86ce-20c3a1473482", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1756365504-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, 
"meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f735ac36a0d46269560f1209706fb69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07253d85-af", "ovs_interfaceid": "07253d85-af48-4ef5-b335-d0ae94097019", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1416.588167] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5d:27:3c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f0ef5aba-bd9a-42ff-a1a0-5e763986d70a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '07253d85-af48-4ef5-b335-d0ae94097019', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1416.595856] env[62740]: DEBUG oslo.service.loopingcall [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1416.596380] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1416.596719] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3a37068c-95d3-4eb3-98b9-891ae7b89030 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.617363] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1416.617363] env[62740]: value = "task-640276" [ 1416.617363] env[62740]: _type = "Task" [ 1416.617363] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.625981] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640276, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.128280] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640276, 'name': CreateVM_Task, 'duration_secs': 0.304296} completed successfully. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.128569] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1417.129144] env[62740]: DEBUG oslo_concurrency.lockutils [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1417.129319] env[62740]: DEBUG oslo_concurrency.lockutils [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1417.129664] env[62740]: DEBUG oslo_concurrency.lockutils [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1417.129912] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7f6bc91-4a40-479a-bad6-9877b0356cbb {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.134572] env[62740]: DEBUG oslo_vmware.api [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Waiting for the task: (returnval){ [ 1417.134572] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52bee305-f74b-79c7-e752-b8b7599d340d" [ 1417.134572] env[62740]: _type = "Task" [ 1417.134572] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.141759] env[62740]: DEBUG oslo_vmware.api [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52bee305-f74b-79c7-e752-b8b7599d340d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.645140] env[62740]: DEBUG oslo_concurrency.lockutils [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1417.645407] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1417.645621] env[62740]: DEBUG oslo_concurrency.lockutils [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1417.943403] env[62740]: DEBUG nova.compute.manager [req-2f261802-b6dc-43b3-baf1-9c482217ccfc req-17f37931-b79f-4c73-a017-2a0b03234eb3 service nova] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Received event network-changed-07253d85-af48-4ef5-b335-d0ae94097019 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1417.943611] env[62740]: DEBUG nova.compute.manager [req-2f261802-b6dc-43b3-baf1-9c482217ccfc req-17f37931-b79f-4c73-a017-2a0b03234eb3 service nova] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Refreshing instance network info cache due to event network-changed-07253d85-af48-4ef5-b335-d0ae94097019. 
{{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1417.943834] env[62740]: DEBUG oslo_concurrency.lockutils [req-2f261802-b6dc-43b3-baf1-9c482217ccfc req-17f37931-b79f-4c73-a017-2a0b03234eb3 service nova] Acquiring lock "refresh_cache-07efd13e-40d0-4158-b17c-6f5c75474ce3" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1417.943980] env[62740]: DEBUG oslo_concurrency.lockutils [req-2f261802-b6dc-43b3-baf1-9c482217ccfc req-17f37931-b79f-4c73-a017-2a0b03234eb3 service nova] Acquired lock "refresh_cache-07efd13e-40d0-4158-b17c-6f5c75474ce3" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1417.944157] env[62740]: DEBUG nova.network.neutron [req-2f261802-b6dc-43b3-baf1-9c482217ccfc req-17f37931-b79f-4c73-a017-2a0b03234eb3 service nova] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Refreshing network info cache for port 07253d85-af48-4ef5-b335-d0ae94097019 {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1417.944742] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-2f261802-b6dc-43b3-baf1-9c482217ccfc req-17f37931-b79f-4c73-a017-2a0b03234eb3 service nova] Expecting reply to msg c04206c33aff40be894d388833eed637 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1417.953450] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c04206c33aff40be894d388833eed637 [ 1418.280722] env[62740]: DEBUG nova.network.neutron [req-2f261802-b6dc-43b3-baf1-9c482217ccfc req-17f37931-b79f-4c73-a017-2a0b03234eb3 service nova] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Updated VIF entry in instance network info cache for port 07253d85-af48-4ef5-b335-d0ae94097019. 
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1418.281144] env[62740]: DEBUG nova.network.neutron [req-2f261802-b6dc-43b3-baf1-9c482217ccfc req-17f37931-b79f-4c73-a017-2a0b03234eb3 service nova] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Updating instance_info_cache with network_info: [{"id": "07253d85-af48-4ef5-b335-d0ae94097019", "address": "fa:16:3e:5d:27:3c", "network": {"id": "f10f5770-f866-413b-86ce-20c3a1473482", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1756365504-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f735ac36a0d46269560f1209706fb69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07253d85-af", "ovs_interfaceid": "07253d85-af48-4ef5-b335-d0ae94097019", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1418.281659] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-2f261802-b6dc-43b3-baf1-9c482217ccfc req-17f37931-b79f-4c73-a017-2a0b03234eb3 service nova] Expecting reply to msg 5fc734074807418bb0594709c4a2d87e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1418.290291] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5fc734074807418bb0594709c4a2d87e [ 1418.291190] env[62740]: DEBUG oslo_concurrency.lockutils [req-2f261802-b6dc-43b3-baf1-9c482217ccfc req-17f37931-b79f-4c73-a017-2a0b03234eb3 service nova] Releasing lock "refresh_cache-07efd13e-40d0-4158-b17c-6f5c75474ce3" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1421.299948] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6797b534-1fe7-430f-8c33-3945daa0959c tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 6581a3fe2ab941e1a40150191e9e1af2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1421.310049] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6581a3fe2ab941e1a40150191e9e1af2 [ 1421.310630] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6797b534-1fe7-430f-8c33-3945daa0959c tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Acquiring lock "07efd13e-40d0-4158-b17c-6f5c75474ce3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1426.890688] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1426.891063] env[62740]: DEBUG 
nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Cleaning up deleted instances with incomplete migration {{(pid=62740) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11255}}
[ 1426.891201] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg f5d63e186e60437b884abcbbb511cc44 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1426.900831] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f5d63e186e60437b884abcbbb511cc44
[ 1427.363597] env[62740]: WARNING oslo_vmware.rw_handles [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 1427.363597] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 1427.363597] env[62740]: ERROR oslo_vmware.rw_handles   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 1427.363597] env[62740]: ERROR oslo_vmware.rw_handles     self._conn.getresponse()
[ 1427.363597] env[62740]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 1427.363597] env[62740]: ERROR oslo_vmware.rw_handles     response.begin()
[ 1427.363597] env[62740]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 1427.363597] env[62740]: ERROR oslo_vmware.rw_handles     version, status, reason = self._read_status()
[ 1427.363597] env[62740]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 1427.363597] env[62740]: ERROR oslo_vmware.rw_handles     raise RemoteDisconnected("Remote end closed connection without"
[ 1427.363597] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 1427.363597] env[62740]: ERROR oslo_vmware.rw_handles
[ 1427.363980] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/4e84ffc8-8332-4e77-a5b9-26b35a808bd5/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore1 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 1427.366193] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 1427.366504] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Copying Virtual Disk [datastore1] vmware_temp/4e84ffc8-8332-4e77-a5b9-26b35a808bd5/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore1] vmware_temp/4e84ffc8-8332-4e77-a5b9-26b35a808bd5/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 1427.366749] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a16037ae-a760-424e-94e9-d1601ba37175 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1427.374270] env[62740]: DEBUG oslo_vmware.api [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Waiting for the task: (returnval){
[ 1427.374270] env[62740]: value = "task-640277"
[ 1427.374270] env[62740]: _type = "Task"
[ 1427.374270] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1427.382369] env[62740]: DEBUG oslo_vmware.api [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Task: {'id': task-640277, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1427.885724] env[62740]: DEBUG oslo_vmware.exceptions [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Fault InvalidArgument not matched. {{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 1427.885724] env[62740]: DEBUG oslo_concurrency.lockutils [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Releasing lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1427.885724] env[62740]: ERROR nova.compute.manager [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1427.885724] env[62740]: Faults: ['InvalidArgument']
[ 1427.885724] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Traceback (most recent call last):
[ 1427.885724] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]   File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 1427.885724] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]     yield resources
[ 1427.885724] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]   File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1427.885724] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]     self.driver.spawn(context, instance, image_meta,
[ 1427.886099] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1427.886099] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1427.886099] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1427.886099] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]     self._fetch_image_if_missing(context, vi)
[ 1427.886099] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1427.886099] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]     image_cache(vi, tmp_image_ds_loc)
[ 1427.886099] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1427.886099] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]     vm_util.copy_virtual_disk(
[ 1427.886099] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1427.886099] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]     session._wait_for_task(vmdk_copy_task)
[ 1427.886099] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1427.886099] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]     return self.wait_for_task(task_ref)
[ 1427.886099] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1427.886552] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]     return evt.wait()
[ 1427.886552] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1427.886552] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]     result = hub.switch()
[ 1427.886552] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1427.886552] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]     return self.greenlet.switch()
[ 1427.886552] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1427.886552] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]     self.f(*self.args, **self.kw)
[ 1427.886552] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1427.886552] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]     raise exceptions.translate_fault(task_info.error)
[ 1427.886552] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1427.886552] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Faults: ['InvalidArgument']
[ 1427.886552] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]
[ 1427.886847] env[62740]: INFO nova.compute.manager [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Terminating instance
[ 1427.888094] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Acquired lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1427.888094] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1427.888094] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-19391bc6-12b3-480c-8c73-f3c5d233fd09 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1427.890233] env[62740]: DEBUG nova.compute.manager [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Start destroying the instance on the hypervisor.
{{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1427.890428] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1427.891176] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c14cd7bd-c0ad-48f8-bde7-daa1228506a0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.897925] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1427.898156] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-76fc273c-bd8b-4ef7-a219-ad21ce281921 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.900332] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1427.900511] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1427.901453] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ea53b18-9671-4ca9-9e2e-511c32d6ba3d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.906068] env[62740]: DEBUG oslo_vmware.api [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Waiting for the task: (returnval){ [ 1427.906068] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]529828f0-8085-d09a-1e07-5da186c86498" [ 1427.906068] env[62740]: _type = "Task" [ 1427.906068] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.918581] env[62740]: DEBUG oslo_vmware.api [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]529828f0-8085-d09a-1e07-5da186c86498, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.966147] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1427.966394] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Deleting contents of the VM from datastore datastore1 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1427.966580] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Deleting the datastore file [datastore1] d6c3ca16-5c7c-41e6-9850-10221603ad2a {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1427.966845] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-416bdb14-6809-48c1-982f-68195e1b189a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.972849] env[62740]: DEBUG oslo_vmware.api [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Waiting for the task: (returnval){ [ 1427.972849] env[62740]: value = "task-640279" [ 1427.972849] env[62740]: _type = "Task" [ 1427.972849] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.980262] env[62740]: DEBUG oslo_vmware.api [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Task: {'id': task-640279, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.416593] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1428.416857] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Creating directory with path [datastore1] vmware_temp/9ac10fef-12ee-4133-b02a-9592ba7ebc0f/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1428.417101] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-46ab8d2f-7e14-438d-85a8-50b441427800 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.430799] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Created directory with path [datastore1] vmware_temp/9ac10fef-12ee-4133-b02a-9592ba7ebc0f/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1428.431030] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Fetch image to [datastore1] vmware_temp/9ac10fef-12ee-4133-b02a-9592ba7ebc0f/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1428.431219] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore1] vmware_temp/9ac10fef-12ee-4133-b02a-9592ba7ebc0f/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore1 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1428.431934] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74b4758a-6c6e-492e-aea4-c01dd2c8556e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.438913] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d525787c-a872-427a-9ccb-d1fdb791f1e7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.447543] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1996fec-2029-42e2-8750-8e07841e183a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.479460] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-14742aeb-fb6f-4eb0-a87b-11c1a78bbd16 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.485764] env[62740]: DEBUG oslo_vmware.api [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Task: {'id': task-640279, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.069525} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.487126] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1428.487315] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Deleted contents of the VM from datastore datastore1 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1428.487493] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1428.487671] env[62740]: INFO nova.compute.manager [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Took 0.60 seconds to destroy the instance on the hypervisor. 
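[editor's note] The entries above show oslo.vmware's task-handling pattern end to end: a vSphere task (CreateVM_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task) is submitted, wait_for_task logs the "(returnval){ ... }" block, and _poll_task reports "progress is N%" until the task either completes ("completed successfully") or fails with a translated fault, as with the InvalidArgument fileType error above. The sketch below is a minimal stand-in for that poll loop using only the standard library; TaskFault and fetch_task_info are hypothetical names, not the oslo.vmware API.

```python
import time

class TaskFault(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException."""

def wait_for_task(fetch_task_info, poll_interval=0.5):
    # fetch_task_info() is a hypothetical callable returning a dict like
    # {'state': 'running'|'success'|'error', 'progress': int, 'error': str};
    # the real driver reads TaskInfo from vCenter instead.
    while True:
        info = fetch_task_info()
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            # Mirrors _poll_task raising exceptions.translate_fault(task_info.error).
            raise TaskFault(info['error'])
        # Mirrors the "progress is N%" DEBUG lines seen in the log.
        print(f"progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)
```

The fixed-interval polling explains the timing in the log: task-640276 reports 0% once, then the next poll roughly half a second later finds it already completed (duration_secs 0.304296).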
[ 1428.489400] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1a050b15-9339-464c-84ff-445e40b87588 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.491228] env[62740]: DEBUG nova.compute.claims [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1428.491400] env[62740]: DEBUG oslo_concurrency.lockutils [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1428.491612] env[62740]: DEBUG oslo_concurrency.lockutils [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1428.493568] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Expecting reply to msg d8dec4c46d3641d9aeadddf6c4b64190 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1428.517275] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore1 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1428.532573] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d8dec4c46d3641d9aeadddf6c4b64190 [ 1428.571827] env[62740]: DEBUG oslo_vmware.rw_handles [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9ac10fef-12ee-4133-b02a-9592ba7ebc0f/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1428.632706] env[62740]: DEBUG oslo_vmware.rw_handles [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Completed reading data from the image iterator. 
{{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1428.633013] env[62740]: DEBUG oslo_vmware.rw_handles [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9ac10fef-12ee-4133-b02a-9592ba7ebc0f/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1428.848450] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf051b06-40e9-4fa4-9f27-7cc0db6d823a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.857020] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e99e1dbc-66ba-44fd-b4cb-5f60d32e4fbe {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.887155] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccfbad90-261f-4f7c-ac1e-3b74f5d6acd4 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.894391] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fce745c0-5350-4bcd-8443-f0738de0787c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.907430] env[62740]: DEBUG nova.compute.provider_tree [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1428.907975] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Expecting reply to msg c5d7297498214136a704ca4e9c040ad0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1428.915467] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c5d7297498214136a704ca4e9c040ad0 [ 1428.916399] env[62740]: DEBUG nova.scheduler.client.report [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1428.918763] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Expecting reply to msg 
82be199eca284d4cb60d7684ed6699f8 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1428.954647] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 82be199eca284d4cb60d7684ed6699f8
[ 1428.955546] env[62740]: DEBUG oslo_concurrency.lockutils [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.464s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1428.956124] env[62740]: ERROR nova.compute.manager [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1428.956124] env[62740]: Faults: ['InvalidArgument']
[ 1428.956124] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Traceback (most recent call last):
[ 1428.956124] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]   File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1428.956124] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]     self.driver.spawn(context, instance, image_meta,
[ 1428.956124] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1428.956124] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1428.956124] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1428.956124] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]     self._fetch_image_if_missing(context, vi)
[ 1428.956124] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1428.956124] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]     image_cache(vi, tmp_image_ds_loc)
[ 1428.956124] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1428.956449] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]     vm_util.copy_virtual_disk(
[ 1428.956449] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1428.956449] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]     session._wait_for_task(vmdk_copy_task)
[ 1428.956449] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1428.956449] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]     return self.wait_for_task(task_ref)
[ 1428.956449] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1428.956449] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]     return evt.wait()
[ 1428.956449] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1428.956449] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]     result = hub.switch()
[ 1428.956449] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1428.956449] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]     return self.greenlet.switch()
[ 1428.956449] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1428.956449] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]     self.f(*self.args, **self.kw)
[ 1428.956755] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1428.956755] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]     raise exceptions.translate_fault(task_info.error)
[ 1428.956755] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1428.956755] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Faults: ['InvalidArgument']
[ 1428.956755] env[62740]: ERROR nova.compute.manager [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a]
[ 1428.956884] env[62740]: DEBUG nova.compute.utils [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1428.958938] env[62740]: DEBUG nova.compute.manager [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Build of instance d6c3ca16-5c7c-41e6-9850-10221603ad2a was re-scheduled: A specified parameter was not correct: fileType
[ 1428.958938] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 1428.959340] env[62740]: DEBUG nova.compute.manager [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 1428.959519] env[62740]: DEBUG nova.compute.manager [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 1428.959766] env[62740]: DEBUG nova.compute.manager [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 1428.959950] env[62740]: DEBUG nova.network.neutron [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1429.588789] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Expecting reply to msg c71fddafeb7d40408c2a7281d2f42465 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1429.601723] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c71fddafeb7d40408c2a7281d2f42465
[ 1429.602356] env[62740]: DEBUG nova.network.neutron [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1429.602862] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Expecting reply to msg 02f2145064bd492396baa77d2878ccbd in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1429.614645] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 02f2145064bd492396baa77d2878ccbd
[ 1429.615309] env[62740]: INFO nova.compute.manager [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Took 0.66 seconds to deallocate network for instance.
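[editor's note] The claim-abort sequence above is bracketed by lockutils accounting lines: 'Acquiring lock "compute_resources" by ...', 'acquired ... waited 0.000s', and '"released" ... held 0.464s'. The sketch below is a rough process-local analogue of that named-lock wrapper, standard library only; named_lock and the printed format are illustrative, not the oslo.concurrency implementation (which also supports inter-process file locks).

```python
import threading
import time
from contextlib import contextmanager

_locks = {}                        # name -> threading.Lock (process-local stand-in)
_registry_guard = threading.Lock() # protects the registry itself

@contextmanager
def named_lock(name, caller):
    # Hypothetical analogue of the lockutils "inner" wrapper that produces
    # the Acquiring/acquired/released lines seen in the log above.
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    print(f'Acquiring lock "{name}" by "{caller}"')
    t0 = time.monotonic()
    lock.acquire()
    print(f'Lock "{name}" acquired by "{caller}" :: waited {time.monotonic() - t0:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" "released" by "{caller}" :: held {time.monotonic() - t1:.3f}s')
```

Used as `with named_lock("compute_resources", "ResourceTracker.abort_instance_claim"): ...`, it emits the same acquire/release accounting around the critical section; the waited/held durations are what make contention (here, the 0.464s hold while allocations are torn down) visible in the log.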
[ 1429.617344] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Expecting reply to msg 17017d92f06846adbc6cb2f7f0bd655d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1429.631867] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquiring lock "3aa2858e-d422-408a-a83a-98382f971add" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1429.632926] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "3aa2858e-d422-408a-a83a-98382f971add" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1429.659445] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 17017d92f06846adbc6cb2f7f0bd655d [ 1429.662405] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Expecting reply to msg fc63befd89584ec69a13a441c305c3fd in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1429.696870] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fc63befd89584ec69a13a441c305c3fd [ 1429.726905] env[62740]: INFO nova.scheduler.client.report [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Deleted allocations for instance d6c3ca16-5c7c-41e6-9850-10221603ad2a [ 1429.733360] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Expecting reply to msg cec4a6c0bd8840e1b6ebd61859c6d48c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1429.746569] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cec4a6c0bd8840e1b6ebd61859c6d48c [ 1429.747150] env[62740]: DEBUG oslo_concurrency.lockutils [None req-d7659489-7688-4aab-afd6-3f4f0c8b4bb6 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Lock "d6c3ca16-5c7c-41e6-9850-10221603ad2a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 570.548s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1429.747696] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-033d8c43-bec8-4ca4-acdf-fc4f35477525 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg ef715fc11ba0461a827c4067f0f8b41f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1429.748454] env[62740]: DEBUG oslo_concurrency.lockutils [None req-08830918-70ea-4b00-85f2-70d07e88f93c tempest-AttachVolumeShelveTestJSON-734905 
tempest-AttachVolumeShelveTestJSON-734905-project-member] Lock "d6c3ca16-5c7c-41e6-9850-10221603ad2a" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 375.449s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1429.748656] env[62740]: DEBUG oslo_concurrency.lockutils [None req-08830918-70ea-4b00-85f2-70d07e88f93c tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Acquiring lock "d6c3ca16-5c7c-41e6-9850-10221603ad2a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1429.748875] env[62740]: DEBUG oslo_concurrency.lockutils [None req-08830918-70ea-4b00-85f2-70d07e88f93c tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Lock "d6c3ca16-5c7c-41e6-9850-10221603ad2a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1429.749054] env[62740]: DEBUG oslo_concurrency.lockutils [None req-08830918-70ea-4b00-85f2-70d07e88f93c tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Lock "d6c3ca16-5c7c-41e6-9850-10221603ad2a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1429.751074] env[62740]: INFO nova.compute.manager [None req-08830918-70ea-4b00-85f2-70d07e88f93c tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Terminating instance [ 1429.752970] env[62740]: DEBUG nova.compute.manager [None req-08830918-70ea-4b00-85f2-70d07e88f93c tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Start destroying the instance on the hypervisor. 
{{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1429.753179] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-08830918-70ea-4b00-85f2-70d07e88f93c tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1429.753636] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ce8c2147-2596-434c-90c4-dadb88ca8d47 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.762505] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-004002bc-ce37-4657-8bd6-ebcc17717f2f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.772998] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ef715fc11ba0461a827c4067f0f8b41f [ 1429.773679] env[62740]: DEBUG nova.compute.manager [None req-033d8c43-bec8-4ca4-acdf-fc4f35477525 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 8a156903-e4cf-43ed-9c6a-962a06ff9ef4] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1429.775310] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-033d8c43-bec8-4ca4-acdf-fc4f35477525 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg cd4b0652aafe40f5abcd7c338fb0072c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1429.793971] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-08830918-70ea-4b00-85f2-70d07e88f93c tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d6c3ca16-5c7c-41e6-9850-10221603ad2a could not be found. [ 1429.794189] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-08830918-70ea-4b00-85f2-70d07e88f93c tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1429.794369] env[62740]: INFO nova.compute.manager [None req-08830918-70ea-4b00-85f2-70d07e88f93c tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1429.794607] env[62740]: DEBUG oslo.service.loopingcall [None req-08830918-70ea-4b00-85f2-70d07e88f93c tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1429.794837] env[62740]: DEBUG nova.compute.manager [-] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1429.794934] env[62740]: DEBUG nova.network.neutron [-] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1429.798723] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd4b0652aafe40f5abcd7c338fb0072c [ 1429.799209] env[62740]: DEBUG nova.compute.manager [None req-033d8c43-bec8-4ca4-acdf-fc4f35477525 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 8a156903-e4cf-43ed-9c6a-962a06ff9ef4] Instance disappeared before build. {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1429.799550] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-033d8c43-bec8-4ca4-acdf-fc4f35477525 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 30579ce264124ae3bbb5d555ffe5d207 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1429.807848] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30579ce264124ae3bbb5d555ffe5d207 [ 1429.812828] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c089803b192846c1bc9c456ebaf19956 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1429.818208] env[62740]: DEBUG oslo_concurrency.lockutils [None req-033d8c43-bec8-4ca4-acdf-fc4f35477525 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "8a156903-e4cf-43ed-9c6a-962a06ff9ef4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 197.401s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1429.818746] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c089803b192846c1bc9c456ebaf19956 [ 1429.819762] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Expecting reply to msg d16d6300c23e471eaa10007d9e62c3a1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1429.820430] env[62740]: DEBUG nova.network.neutron [-] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1429.820769] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 01c10ba318834297b86fe75118f26f1a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1429.827791] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 01c10ba318834297b86fe75118f26f1a [ 1429.828415] env[62740]: INFO nova.compute.manager [-] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] Took 0.03 seconds to deallocate network for instance. 
[ 1429.829790] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d16d6300c23e471eaa10007d9e62c3a1 [ 1429.830217] env[62740]: DEBUG nova.compute.manager [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1429.831874] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Expecting reply to msg 11ea02becf9c48e0acf25d3be73752b5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1429.834232] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-08830918-70ea-4b00-85f2-70d07e88f93c tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Expecting reply to msg b5cdf8b2a0304d62a2fc0f72224d2ecb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1429.861967] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b5cdf8b2a0304d62a2fc0f72224d2ecb [ 1429.865173] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 11ea02becf9c48e0acf25d3be73752b5 [ 1429.875431] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-08830918-70ea-4b00-85f2-70d07e88f93c tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Expecting reply to msg 5316140157d84ec0bb9c39c781505dd8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1429.880141] env[62740]: DEBUG oslo_concurrency.lockutils [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1429.880295] env[62740]: DEBUG oslo_concurrency.lockutils [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1429.881769] env[62740]: INFO nova.compute.claims [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1429.883407] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Expecting reply to msg 3f088cebd14542a6a4ec18741d619778 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1429.914206] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5316140157d84ec0bb9c39c781505dd8 [ 1429.917649] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f088cebd14542a6a4ec18741d619778 [ 
1429.919265] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Expecting reply to msg 3e748fed8d814df6bd5e62184d6903bd in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1429.920190] env[62740]: DEBUG oslo_concurrency.lockutils [None req-08830918-70ea-4b00-85f2-70d07e88f93c tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Lock "d6c3ca16-5c7c-41e6-9850-10221603ad2a" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.172s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1429.920539] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-08830918-70ea-4b00-85f2-70d07e88f93c tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Expecting reply to msg 443815948341451ca6e452dd13470974 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1429.921325] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "d6c3ca16-5c7c-41e6-9850-10221603ad2a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 247.662s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1429.921506] env[62740]: INFO nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: d6c3ca16-5c7c-41e6-9850-10221603ad2a] During sync_power_state the instance has a pending task (deleting). Skip. [ 1429.921677] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "d6c3ca16-5c7c-41e6-9850-10221603ad2a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1429.926846] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e748fed8d814df6bd5e62184d6903bd [ 1429.931986] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 443815948341451ca6e452dd13470974 [ 1430.110666] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89924959-50cc-4cbe-b2c5-14b0369f1dff {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.118085] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ac9aaf1-5a7e-404e-99b7-7a6f65cc0885 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.147016] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-460dd211-4ee1-4944-a774-e781db1ceb5e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.154279] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb2e9e8d-bbe0-4d6c-8dd1-b3f69228bbf5 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.168067] env[62740]: DEBUG 
nova.compute.provider_tree [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1430.168580] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Expecting reply to msg 977d713dbbf84f32a48d305e37fa809c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1430.175770] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 977d713dbbf84f32a48d305e37fa809c [ 1430.176646] env[62740]: DEBUG nova.scheduler.client.report [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1430.178925] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Expecting reply to msg c0d61de19de543e085bb18b7f632b9f0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1430.189759] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c0d61de19de543e085bb18b7f632b9f0 [ 1430.190316] env[62740]: DEBUG oslo_concurrency.lockutils [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.310s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1430.190759] env[62740]: DEBUG nova.compute.manager [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Start building networks asynchronously for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1430.192450] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Expecting reply to msg 3025b769d3404682ace3182ad2a017d8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1430.221672] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3025b769d3404682ace3182ad2a017d8 [ 1430.224537] env[62740]: DEBUG nova.compute.utils [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1430.225128] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Expecting reply to msg 81bd809719ba41b393789e2520816f20 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1430.226077] env[62740]: DEBUG nova.compute.manager [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1430.226246] env[62740]: DEBUG nova.network.neutron [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1430.234176] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 81bd809719ba41b393789e2520816f20 [ 1430.234639] env[62740]: DEBUG nova.compute.manager [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1430.236277] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Expecting reply to msg 70dfa62b5b0445aca2a0efc06e4ce153 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1430.263358] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 70dfa62b5b0445aca2a0efc06e4ce153 [ 1430.266037] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Expecting reply to msg ce29a6a3cf344f2c8d841350090396d2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1430.280031] env[62740]: DEBUG nova.policy [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1210d5ecea1e4c4580e99c51d04a7230', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '079e30b58df0470f8f11c919757e3e46', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 1430.299823] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ce29a6a3cf344f2c8d841350090396d2 [ 1430.301250] env[62740]: DEBUG nova.compute.manager [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Start spawning the instance on the hypervisor. 
{{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1430.325347] env[62740]: DEBUG nova.virt.hardware [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=<?>,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-09-04T08:25:18Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1430.325583] env[62740]: DEBUG nova.virt.hardware [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1430.325745] env[62740]: DEBUG nova.virt.hardware [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1430.325930] env[62740]: DEBUG nova.virt.hardware [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1430.326091] env[62740]: DEBUG nova.virt.hardware [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1430.326243] env[62740]: DEBUG nova.virt.hardware [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1430.326449] env[62740]: DEBUG nova.virt.hardware [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1430.327064] env[62740]: DEBUG nova.virt.hardware [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1430.327270] env[62740]: DEBUG nova.virt.hardware [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1430.327466] env[62740]: DEBUG nova.virt.hardware [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1430.327661] env[62740]: DEBUG nova.virt.hardware [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1430.328547] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-716ad001-196d-448b-9a15-b1c82fd20b78 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.336609] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8b1e3e7-5f3f-4383-9da2-4b1064f0ee22 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.716973] env[62740]: DEBUG nova.network.neutron [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Successfully created port: 918eefa6-d545-42fc-8b48-132fb19273cc {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1431.440043] env[62740]: DEBUG nova.network.neutron [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Successfully updated port: 918eefa6-d545-42fc-8b48-132fb19273cc {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1431.440552] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Expecting reply to msg 79568f3a4c2e4d9588b1396707b24515 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1431.455667] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79568f3a4c2e4d9588b1396707b24515 [ 1431.456419] env[62740]: DEBUG oslo_concurrency.lockutils [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Acquiring lock "refresh_cache-388d71f2-b229-4666-a53d-d5b07e498eed" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1431.456578] env[62740]: DEBUG oslo_concurrency.lockutils [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 
tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Acquired lock "refresh_cache-388d71f2-b229-4666-a53d-d5b07e498eed" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1431.456775] env[62740]: DEBUG nova.network.neutron [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1431.457125] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Expecting reply to msg 3533bbea61274d4b842e87fe0482cf85 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1431.466485] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3533bbea61274d4b842e87fe0482cf85 [ 1431.498516] env[62740]: DEBUG nova.network.neutron [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1431.666821] env[62740]: DEBUG nova.network.neutron [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Updating instance_info_cache with network_info: [{"id": "918eefa6-d545-42fc-8b48-132fb19273cc", "address": "fa:16:3e:e3:0a:ab", "network": {"id": "c065242e-da33-4a95-ae29-9f83f84727b1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1375801182-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "079e30b58df0470f8f11c919757e3e46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap918eefa6-d5", "ovs_interfaceid": "918eefa6-d545-42fc-8b48-132fb19273cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1431.667379] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Expecting reply to msg 0f50e36e9d9b41af9bcb379cfa1deb61 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1431.679632] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0f50e36e9d9b41af9bcb379cfa1deb61 [ 
1431.680232] env[62740]: DEBUG oslo_concurrency.lockutils [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Releasing lock "refresh_cache-388d71f2-b229-4666-a53d-d5b07e498eed" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1431.680501] env[62740]: DEBUG nova.compute.manager [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Instance network_info: |[{"id": "918eefa6-d545-42fc-8b48-132fb19273cc", "address": "fa:16:3e:e3:0a:ab", "network": {"id": "c065242e-da33-4a95-ae29-9f83f84727b1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1375801182-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "079e30b58df0470f8f11c919757e3e46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap918eefa6-d5", "ovs_interfaceid": "918eefa6-d545-42fc-8b48-132fb19273cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1431.680917] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e3:0a:ab', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd4cb37d4-2060-48b6-9e60-156a71fc7ee3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '918eefa6-d545-42fc-8b48-132fb19273cc', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1431.688371] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Creating folder: Project (079e30b58df0470f8f11c919757e3e46). Parent ref: group-v156037. 
{{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1431.689247] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9c39b0d4-d823-45be-bcc9-18f09665e84a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.699666] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Created folder: Project (079e30b58df0470f8f11c919757e3e46) in parent group-v156037. [ 1431.699847] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Creating folder: Instances. Parent ref: group-v156158. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1431.700079] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fc62aa52-479f-4679-9c8d-a1cf2953173c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.708626] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Created folder: Instances in parent group-v156158. [ 1431.708849] env[62740]: DEBUG oslo.service.loopingcall [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1431.709038] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1431.709234] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8d15b7f7-ba5f-4aac-b4b3-04fd32980143 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.729601] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1431.729601] env[62740]: value = "task-640282" [ 1431.729601] env[62740]: _type = "Task" [ 1431.729601] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.737378] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640282, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.766072] env[62740]: DEBUG nova.compute.manager [req-3b50c863-ba49-45fe-95ca-9e3ca29b149c req-77917fb3-26c1-4941-8ee0-9ddde6664aca service nova] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Received event network-vif-plugged-918eefa6-d545-42fc-8b48-132fb19273cc {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1431.766327] env[62740]: DEBUG oslo_concurrency.lockutils [req-3b50c863-ba49-45fe-95ca-9e3ca29b149c req-77917fb3-26c1-4941-8ee0-9ddde6664aca service nova] Acquiring lock "388d71f2-b229-4666-a53d-d5b07e498eed-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1431.766527] env[62740]: DEBUG oslo_concurrency.lockutils [req-3b50c863-ba49-45fe-95ca-9e3ca29b149c req-77917fb3-26c1-4941-8ee0-9ddde6664aca service nova] Lock "388d71f2-b229-4666-a53d-d5b07e498eed-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1431.766719] env[62740]: DEBUG oslo_concurrency.lockutils [req-3b50c863-ba49-45fe-95ca-9e3ca29b149c req-77917fb3-26c1-4941-8ee0-9ddde6664aca service nova] Lock "388d71f2-b229-4666-a53d-d5b07e498eed-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1431.766853] env[62740]: DEBUG nova.compute.manager [req-3b50c863-ba49-45fe-95ca-9e3ca29b149c req-77917fb3-26c1-4941-8ee0-9ddde6664aca service nova] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] No waiting events found dispatching network-vif-plugged-918eefa6-d545-42fc-8b48-132fb19273cc {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1431.767044] env[62740]: WARNING nova.compute.manager [req-3b50c863-ba49-45fe-95ca-9e3ca29b149c req-77917fb3-26c1-4941-8ee0-9ddde6664aca service nova] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Received unexpected event network-vif-plugged-918eefa6-d545-42fc-8b48-132fb19273cc for instance with vm_state building and task_state spawning. [ 1431.767223] env[62740]: DEBUG nova.compute.manager [req-3b50c863-ba49-45fe-95ca-9e3ca29b149c req-77917fb3-26c1-4941-8ee0-9ddde6664aca service nova] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Received event network-changed-918eefa6-d545-42fc-8b48-132fb19273cc {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1431.767393] env[62740]: DEBUG nova.compute.manager [req-3b50c863-ba49-45fe-95ca-9e3ca29b149c req-77917fb3-26c1-4941-8ee0-9ddde6664aca service nova] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Refreshing instance network info cache due to event network-changed-918eefa6-d545-42fc-8b48-132fb19273cc. 
{{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1431.767584] env[62740]: DEBUG oslo_concurrency.lockutils [req-3b50c863-ba49-45fe-95ca-9e3ca29b149c req-77917fb3-26c1-4941-8ee0-9ddde6664aca service nova] Acquiring lock "refresh_cache-388d71f2-b229-4666-a53d-d5b07e498eed" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1431.767731] env[62740]: DEBUG oslo_concurrency.lockutils [req-3b50c863-ba49-45fe-95ca-9e3ca29b149c req-77917fb3-26c1-4941-8ee0-9ddde6664aca service nova] Acquired lock "refresh_cache-388d71f2-b229-4666-a53d-d5b07e498eed" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1431.767952] env[62740]: DEBUG nova.network.neutron [req-3b50c863-ba49-45fe-95ca-9e3ca29b149c req-77917fb3-26c1-4941-8ee0-9ddde6664aca service nova] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Refreshing network info cache for port 918eefa6-d545-42fc-8b48-132fb19273cc {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1431.768408] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-3b50c863-ba49-45fe-95ca-9e3ca29b149c req-77917fb3-26c1-4941-8ee0-9ddde6664aca service nova] Expecting reply to msg dba05e1bdc4644129d52f022b92ff86d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1431.776654] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dba05e1bdc4644129d52f022b92ff86d [ 1432.106266] env[62740]: DEBUG nova.network.neutron [req-3b50c863-ba49-45fe-95ca-9e3ca29b149c req-77917fb3-26c1-4941-8ee0-9ddde6664aca service nova] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Updated VIF entry in instance network info cache for port 918eefa6-d545-42fc-8b48-132fb19273cc. 
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1432.106725] env[62740]: DEBUG nova.network.neutron [req-3b50c863-ba49-45fe-95ca-9e3ca29b149c req-77917fb3-26c1-4941-8ee0-9ddde6664aca service nova] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Updating instance_info_cache with network_info: [{"id": "918eefa6-d545-42fc-8b48-132fb19273cc", "address": "fa:16:3e:e3:0a:ab", "network": {"id": "c065242e-da33-4a95-ae29-9f83f84727b1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1375801182-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "079e30b58df0470f8f11c919757e3e46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap918eefa6-d5", "ovs_interfaceid": "918eefa6-d545-42fc-8b48-132fb19273cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1432.107250] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-3b50c863-ba49-45fe-95ca-9e3ca29b149c req-77917fb3-26c1-4941-8ee0-9ddde6664aca service nova] Expecting reply to msg eee37191739d49f79c85b9fcecf2e668 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1432.116182] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eee37191739d49f79c85b9fcecf2e668 [ 1432.116886] env[62740]: DEBUG oslo_concurrency.lockutils [req-3b50c863-ba49-45fe-95ca-9e3ca29b149c req-77917fb3-26c1-4941-8ee0-9ddde6664aca service nova] Releasing lock "refresh_cache-388d71f2-b229-4666-a53d-d5b07e498eed" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1432.240247] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640282, 'name': CreateVM_Task, 'duration_secs': 0.333436} completed successfully. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.240422] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1432.241339] env[62740]: DEBUG oslo_concurrency.lockutils [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1432.241510] env[62740]: DEBUG oslo_concurrency.lockutils [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1432.241830] env[62740]: DEBUG oslo_concurrency.lockutils [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1432.242096] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1b99fb8-c043-48a2-91f1-06e7f7b67c4f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.246989] env[62740]: DEBUG oslo_vmware.api [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Waiting for the task: (returnval){ [ 1432.246989] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52e93861-ff5b-0384-cd66-463c6989fb22" [ 1432.246989] env[62740]: _type = "Task" [ 1432.246989] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.255872] env[62740]: DEBUG oslo_vmware.api [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52e93861-ff5b-0384-cd66-463c6989fb22, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.757931] env[62740]: DEBUG oslo_concurrency.lockutils [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1432.757931] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1432.757931] env[62740]: DEBUG oslo_concurrency.lockutils [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1432.896312] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1433.890981] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1433.890981] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Starting heal instance info cache {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 1433.890981] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Rebuilding the list of instances to heal {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 1433.891441] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 7ab2e2dac33f4f8baa0f033677355dcf in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1433.912033] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ab2e2dac33f4f8baa0f033677355dcf [ 1433.914475] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 158406db-7196-4826-aefa-20a58daa186b] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1433.914632] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Skipping network cache update for instance because it is Building. 
{{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1433.914768] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1433.914910] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1433.915084] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1433.915215] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1433.915340] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1433.915464] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1433.915583] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1433.915704] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1433.915821] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Didn't find any instances for network info cache update. 
{{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 1433.916322] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1433.916609] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1434.890939] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1434.891214] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1434.891473] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Cleaning up deleted instances {{(pid=62740) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11217}} [ 1434.891933] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 56a955dd938d443eb79d804579933b94 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1434.903208] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 56a955dd938d443eb79d804579933b94 [ 1434.903707] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] There are 0 instances to clean {{(pid=62740) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11226}} [ 1436.903688] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1436.903991] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1436.904111] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62740) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 1436.904269] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager.update_available_resource {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1436.904654] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 8e34c98b1c8c4beb80b2b2a6a5cb38fc in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1436.915015] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e34c98b1c8c4beb80b2b2a6a5cb38fc [ 1436.916018] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1436.916235] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1436.916436] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1436.916611] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62740) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1436.917697] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ecfeeea-264a-4230-a78b-cd50ec6d087a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.926695] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ec7644c-35ac-4591-9a47-66082a47e36b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.940720] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80923ab0-409e-488d-93a2-f12c6a49d689 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.947742] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-423c7ca3-6e5a-4c45-ad3c-c7c41d53d2e1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.975904] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181642MB free_disk=90GB free_vcpus=48 pci_devices=None {{(pid=62740) _report_hypervisor_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1436.976066] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1436.976271] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1436.977099] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 36423214854d4b1c8f52641be4b66864 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1437.011465] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 36423214854d4b1c8f52641be4b66864 [ 1437.015465] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 8c98a880439441d5b02b9389eb883904 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1437.025305] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8c98a880439441d5b02b9389eb883904 [ 1437.048432] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 158406db-7196-4826-aefa-20a58daa186b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1437.048615] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 7aacf4e0-b508-4a18-909a-3d1fe9458d98 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1437.048752] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 6005c9dc-3067-4719-a8f9-befb63f7cd8d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1437.048877] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance ba23ede2-be42-48ac-b281-571ccd158dee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1437.049022] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1437.049149] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 2deff09f-d24f-4609-91f2-1585e8407c2a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1437.049268] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 3f36f081-2851-4339-860d-0a302ef4ee2c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1437.049386] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 913ddb91-9d46-459e-8775-c9f380ed3cc4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1437.049521] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 07efd13e-40d0-4158-b17c-6f5c75474ce3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1437.049651] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 388d71f2-b229-4666-a53d-d5b07e498eed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1437.050198] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 1ed22568f07c4f0587145c300e562a52 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1437.060319] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1ed22568f07c4f0587145c300e562a52 [ 1437.061107] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 58ae8579-4ea3-45ba-a982-10e0ca82874c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1437.061597] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 21e56a17ceba48e8807476379cfc3550 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1437.071330] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 21e56a17ceba48e8807476379cfc3550 [ 1437.072017] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance c0daf074-eecb-4899-938f-477031efc6d1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1437.072484] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 6c807b7ec6a24c6e9058a6c4e534fa7b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1437.080939] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c807b7ec6a24c6e9058a6c4e534fa7b [ 1437.813929] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 61fea037-aac3-47ef-aa6a-5dfa657d840d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1437.814492] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 78a40f407b31460ea95551f2c66ca28d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1437.825026] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 78a40f407b31460ea95551f2c66ca28d [ 1437.825671] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 86c57375-8328-4344-b228-2f1ce6efc71e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1437.826207] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 2fd6a9481ddb446abb9ab0e2702b0294 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1437.836397] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2fd6a9481ddb446abb9ab0e2702b0294 [ 1437.837060] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance a41506d2-33b2-40b8-badb-41312c7abbd2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1437.837525] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 83a2e65e1cdc4a1ebc324101e3182efd in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1437.846180] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 83a2e65e1cdc4a1ebc324101e3182efd [ 1437.846815] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 3aa2858e-d422-408a-a83a-98382f971add has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1437.847057] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1437.847206] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1438.030579] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9331b9a1-c480-4b71-8818-4f13eacdbfc9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.038462] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd9010e7-4ffc-44b4-ab66-996276cddef0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.071916] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3402ae7c-e101-4142-b0ae-bb1ae22ac726 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.082163] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e55eb69b-b1f7-4af0-bae9-8e5a41365557 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.099344] env[62740]: DEBUG nova.compute.provider_tree [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1438.099795] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg f5e70898d2e24e798f73364e7ed554ae in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1438.107219] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f5e70898d2e24e798f73364e7ed554ae [ 1438.108063] env[62740]: DEBUG nova.scheduler.client.report [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed for provider 
d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1438.110330] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg c68e4f3a2d524935b3f074b2fd020e7a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1438.123109] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c68e4f3a2d524935b3f074b2fd020e7a [ 1438.123721] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62740) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1438.123904] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.148s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1438.891389] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1438.891713] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg e40185d80e284d92bc615a683462b12f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1438.898831] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e40185d80e284d92bc615a683462b12f [ 1439.898726] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1440.957838] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c5df9f74-aa14-47d0-9db1-90cef485b440 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Expecting reply to msg cf717477c6ee4c4abae8f1497a77b0c9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1440.967714] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cf717477c6ee4c4abae8f1497a77b0c9 [ 1440.968216] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c5df9f74-aa14-47d0-9db1-90cef485b440 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Acquiring lock "388d71f2-b229-4666-a53d-d5b07e498eed" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1461.550037] env[62740]: WARNING oslo_vmware.rw_handles [None req-6bcb63ec-65e2-494f-a594-90d209f328cb
tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1461.550037] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1461.550037] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1461.550037] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1461.550037] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1461.550037] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 1461.550037] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1461.550037] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1461.550037] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1461.550037] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1461.550037] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1461.550037] env[62740]: ERROR oslo_vmware.rw_handles [ 1461.550645] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/9bc7c4c5-ca5e-4f81-aeaf-26770bda8b4f/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1461.553424] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1461.553719] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Copying Virtual Disk [datastore2] vmware_temp/9bc7c4c5-ca5e-4f81-aeaf-26770bda8b4f/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore2] vmware_temp/9bc7c4c5-ca5e-4f81-aeaf-26770bda8b4f/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1461.554043] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1e2f9b81-ff3b-43f0-8df9-b0646478676f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.562221] env[62740]: DEBUG oslo_vmware.api [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Waiting for the task: (returnval){ [ 1461.562221] env[62740]: value = "task-640283" [ 1461.562221] env[62740]: _type = "Task" [ 1461.562221] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.570550] env[62740]: DEBUG oslo_vmware.api [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Task: {'id': task-640283, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.072224] env[62740]: DEBUG oslo_vmware.exceptions [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Fault InvalidArgument not matched. {{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1462.072573] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1462.073197] env[62740]: ERROR nova.compute.manager [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1462.073197] env[62740]: Faults: ['InvalidArgument'] [ 1462.073197] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Traceback (most recent call last): [ 1462.073197] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1462.073197] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] yield resources [ 1462.073197] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1462.073197] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] self.driver.spawn(context, instance, image_meta, [ 1462.073197] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1462.073197] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1462.073197] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1462.073197] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] self._fetch_image_if_missing(context, vi) [ 1462.073197] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1462.073536] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] image_cache(vi, tmp_image_ds_loc) [ 1462.073536] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1462.073536] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] vm_util.copy_virtual_disk( [ 1462.073536] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1462.073536] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] session._wait_for_task(vmdk_copy_task) [ 1462.073536] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1462.073536] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] return self.wait_for_task(task_ref) [ 1462.073536] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1462.073536] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] return evt.wait() [ 1462.073536] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1462.073536] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] result = hub.switch() [ 1462.073536] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1462.073536] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] return self.greenlet.switch() [ 1462.073885] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1462.073885] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] self.f(*self.args, **self.kw) [ 1462.073885] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1462.073885] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] raise exceptions.translate_fault(task_info.error) [ 1462.073885] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1462.073885] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Faults: ['InvalidArgument'] [ 1462.073885] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] [ 1462.073885] env[62740]: INFO nova.compute.manager [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Terminating instance [ 1462.075285] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1462.075503] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1462.075740] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-698fefe4-1aae-4536-813f-73cac459556f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.078041] env[62740]: DEBUG nova.compute.manager [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1462.078238] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1462.078986] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b0351c9-81c8-4fda-b8a3-6f7b34825890 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.085390] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1462.085597] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f2f85f28-740e-4af3-8657-137840332833 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.087628] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1462.087805] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1462.088745] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bdf4e8af-1d14-42b7-a02c-63edeec887b6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.093205] env[62740]: DEBUG oslo_vmware.api [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Waiting for the task: (returnval){ [ 1462.093205] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]525b1c52-0460-9bb5-07f7-294890489345" [ 1462.093205] env[62740]: _type = "Task" [ 1462.093205] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.103057] env[62740]: DEBUG oslo_vmware.api [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]525b1c52-0460-9bb5-07f7-294890489345, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.161407] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1462.161616] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1462.161794] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Deleting the datastore file [datastore2] 7aacf4e0-b508-4a18-909a-3d1fe9458d98 {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1462.162064] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6c1557d9-0558-4ad0-aea9-ee7d41dcbee4 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.168137] env[62740]: DEBUG oslo_vmware.api [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Waiting for the task: (returnval){ [ 1462.168137] env[62740]: value = "task-640285" [ 1462.168137] env[62740]: _type = "Task" [ 1462.168137] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.175462] env[62740]: DEBUG oslo_vmware.api [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Task: {'id': task-640285, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.608012] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1462.608502] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Creating directory with path [datastore2] vmware_temp/52783c17-5e48-42d0-a15b-45a4f0836326/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1462.608918] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e404925d-4224-4fe1-851b-55dd290c2999 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.622235] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Created directory with path [datastore2] vmware_temp/52783c17-5e48-42d0-a15b-45a4f0836326/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1462.622481] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Fetch image to [datastore2] vmware_temp/52783c17-5e48-42d0-a15b-45a4f0836326/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1462.622663] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/52783c17-5e48-42d0-a15b-45a4f0836326/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1462.623613] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ca02244-6fa3-441c-b230-37bc8778ffba {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.630855] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb7f3bf2-4f91-46ec-8229-0b73f720ad54 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.640098] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-686fd466-4790-447a-affb-42692c34ac74 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.675394] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87c3344d-1b79-4536-970b-5fd0e0668547 {{(pid=62740) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.684073] env[62740]: DEBUG oslo_vmware.api [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Task: {'id': task-640285, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074444} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.685359] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1462.685556] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1462.685729] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1462.685903] env[62740]: INFO nova.compute.manager [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Took 0.61 seconds to destroy the instance on the hypervisor. 
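The CopyVirtualDisk_Task and DeleteDatastoreFile_Task entries above follow one oslo.vmware pattern: invoke a vSphere method that returns a Task managed object, then poll the task until it completes or faults. A minimal sketch of that pattern, assuming a placeholder vCenter endpoint, credentials, and datastore path (none of these values are taken from this log):

    from oslo_vmware import api as vmware_api

    # Hypothetical endpoint and credentials; creating the session logs in
    # to the server before any API calls are made.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'admin', 'secret',
        api_retry_count=2, task_poll_interval=0.5)

    # Long-running vSphere operations return a Task reference rather than
    # a result; DeleteDatastoreFile_Task is the call behind task-640285.
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task',
        session.vim.service_content.fileManager,
        name='[datastore2] vmware_temp/example-dir',  # hypothetical path
        datacenter=None)

    # wait_for_task() polls task_info (the "progress is 0%" lines) and, on
    # an error state, raises the translated fault -- the route by which the
    # InvalidArgument/fileType fault above reaches nova.compute.manager.
    task_info = session.wait_for_task(task)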
[ 1462.687766] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-84f95611-db7e-41dd-b96b-e10e330f36b2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.689806] env[62740]: DEBUG nova.compute.claims [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1462.689989] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1462.690233] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1462.692313] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Expecting reply to msg b9c1271ec2f74180b15a52b59ee231b9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1462.719463] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1462.739794] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b9c1271ec2f74180b15a52b59ee231b9 [ 1462.757869] env[62740]: DEBUG nova.scheduler.client.report [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Refreshing inventories for resource provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1462.772103] env[62740]: DEBUG nova.scheduler.client.report [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Updating ProviderTree inventory for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1462.772486] env[62740]: DEBUG nova.compute.provider_tree [None 
req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Updating inventory in ProviderTree for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1462.777113] env[62740]: DEBUG oslo_vmware.rw_handles [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/52783c17-5e48-42d0-a15b-45a4f0836326/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1462.834749] env[62740]: DEBUG nova.scheduler.client.report [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Refreshing aggregate associations for resource provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0, aggregates: None {{(pid=62740) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1462.839143] env[62740]: DEBUG oslo_vmware.rw_handles [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Completed reading data from the image iterator. {{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1462.839362] env[62740]: DEBUG oslo_vmware.rw_handles [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/52783c17-5e48-42d0-a15b-45a4f0836326/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1462.856517] env[62740]: DEBUG nova.scheduler.client.report [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Refreshing trait associations for resource provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62740) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1463.030532] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa83a012-1d3c-4c21-b647-b80a2e7b2231 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.037930] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc071f28-4ac8-48a3-9106-88e5839d7022 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.068717] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f1a0554-fdfa-4fcf-8da2-55ac57c73441 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.075700] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-631db8b4-d975-47dd-8f7d-bf1af6e12702 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.088653] env[62740]: DEBUG nova.compute.provider_tree [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1463.089152] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Expecting reply to msg 85cc5e3d08d34147a5beb885b831cd42 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1463.096229] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 85cc5e3d08d34147a5beb885b831cd42 [ 1463.097156] env[62740]: DEBUG nova.scheduler.client.report [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1463.099569] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Expecting reply to msg 2d3025e41b1a48be80f937b74ba06d6e in 
queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1463.111046] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2d3025e41b1a48be80f937b74ba06d6e [ 1463.111810] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.422s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1463.112353] env[62740]: ERROR nova.compute.manager [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1463.112353] env[62740]: Faults: ['InvalidArgument'] [ 1463.112353] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Traceback (most recent call last): [ 1463.112353] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1463.112353] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] self.driver.spawn(context, instance, image_meta, [ 1463.112353] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1463.112353] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1463.112353] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1463.112353] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] self._fetch_image_if_missing(context, vi) [ 1463.112353] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1463.112353] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] image_cache(vi, tmp_image_ds_loc) [ 1463.112353] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1463.112980] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] vm_util.copy_virtual_disk( [ 1463.112980] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1463.112980] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] session._wait_for_task(vmdk_copy_task) [ 1463.112980] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1463.112980] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] return self.wait_for_task(task_ref) [ 1463.112980] env[62740]: ERROR nova.compute.manager [instance: 
7aacf4e0-b508-4a18-909a-3d1fe9458d98] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1463.112980] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] return evt.wait() [ 1463.112980] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1463.112980] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] result = hub.switch() [ 1463.112980] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1463.112980] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] return self.greenlet.switch() [ 1463.112980] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1463.112980] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] self.f(*self.args, **self.kw) [ 1463.113540] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1463.113540] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] raise exceptions.translate_fault(task_info.error) [ 1463.113540] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1463.113540] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Faults: ['InvalidArgument'] [ 1463.113540] env[62740]: ERROR nova.compute.manager [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] [ 1463.113540] env[62740]: DEBUG nova.compute.utils [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1463.114625] env[62740]: DEBUG nova.compute.manager [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Build of instance 7aacf4e0-b508-4a18-909a-3d1fe9458d98 was re-scheduled: A specified parameter was not correct: fileType [ 1463.114625] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1463.115038] env[62740]: DEBUG nova.compute.manager [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1463.115231] env[62740]: DEBUG nova.compute.manager [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if 
VIFs should be unplugged. {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1463.115411] env[62740]: DEBUG nova.compute.manager [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1463.115584] env[62740]: DEBUG nova.network.neutron [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1463.500875] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Expecting reply to msg da979ddddf904ee7891129f302abf360 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1463.510586] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg da979ddddf904ee7891129f302abf360 [ 1463.511519] env[62740]: DEBUG nova.network.neutron [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1463.511723] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Expecting reply to msg 7e8d2c61018141e3984a8c3e7da143ff in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1463.522259] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7e8d2c61018141e3984a8c3e7da143ff [ 1463.522858] env[62740]: INFO nova.compute.manager [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Took 0.41 seconds to deallocate network for instance. 
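The image transfers in this stretch go through oslo.vmware's HTTP file handles: the writer streams image data to a datastore URL of the folder/<path>?dcPath=<dc>&dsName=<ds> form seen above, and close() is where the handle reads the server's response, the step that produced the earlier RemoteDisconnected traceback. A minimal sketch, assuming a placeholder host, an empty cookie list, and an illustrative path (only the 21318656-byte size comes from the log):

    from oslo_vmware import rw_handles

    # Hypothetical host and path; the cookie list normally carries the
    # authenticated vim session cookie so the datastore accepts the write.
    handle = rw_handles.FileWriteHandle(
        'esx.example.org', 443,
        'ha-datacenter', 'datastore2',
        cookies=[],
        file_path='vmware_temp/example/tmp-sparse.vmdk',
        file_size=21318656)

    image_chunks = [b'\x00' * 4096]  # stand-in for the image data iterator
    for chunk in image_chunks:
        handle.write(chunk)

    # close() calls getresponse() on the underlying connection; a server
    # that drops the connection here raises http.client.RemoteDisconnected,
    # as in the warning logged earlier.
    handle.close()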
[ 1463.524736] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Expecting reply to msg e20203d6179c43a6a09282cc32f13e11 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1463.570655] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e20203d6179c43a6a09282cc32f13e11 [ 1463.574702] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Expecting reply to msg bed5f8d95dfe46d8a29b63236f3f6ba8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1463.607178] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bed5f8d95dfe46d8a29b63236f3f6ba8 [ 1463.634081] env[62740]: INFO nova.scheduler.client.report [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Deleted allocations for instance 7aacf4e0-b508-4a18-909a-3d1fe9458d98 [ 1463.640568] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Expecting reply to msg 23d53f0163e24933923190f086090196 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1463.655031] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 23d53f0163e24933923190f086090196 [ 1463.655865] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6bcb63ec-65e2-494f-a594-90d209f328cb tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Lock "7aacf4e0-b508-4a18-909a-3d1fe9458d98" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 403.407s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1463.656336] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-195f8702-2a8b-4af1-beac-c4bffc17d311 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Expecting reply to msg eef20be5559e43d5bd0ac1db2b16c7e3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1463.657238] env[62740]: DEBUG oslo_concurrency.lockutils [None req-754f5765-fbfb-4df7-86b1-0454a446d0dc tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Lock "7aacf4e0-b508-4a18-909a-3d1fe9458d98" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 206.407s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1463.657466] env[62740]: DEBUG oslo_concurrency.lockutils [None req-754f5765-fbfb-4df7-86b1-0454a446d0dc tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Acquiring lock "7aacf4e0-b508-4a18-909a-3d1fe9458d98-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1463.657693] env[62740]: DEBUG oslo_concurrency.lockutils [None req-754f5765-fbfb-4df7-86b1-0454a446d0dc tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Lock 
"7aacf4e0-b508-4a18-909a-3d1fe9458d98-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1463.657882] env[62740]: DEBUG oslo_concurrency.lockutils [None req-754f5765-fbfb-4df7-86b1-0454a446d0dc tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Lock "7aacf4e0-b508-4a18-909a-3d1fe9458d98-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1463.660010] env[62740]: INFO nova.compute.manager [None req-754f5765-fbfb-4df7-86b1-0454a446d0dc tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Terminating instance [ 1463.661865] env[62740]: DEBUG nova.compute.manager [None req-754f5765-fbfb-4df7-86b1-0454a446d0dc tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1463.661935] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-754f5765-fbfb-4df7-86b1-0454a446d0dc tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1463.662419] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-285077ef-8c68-459c-8837-a9c48613a677 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.667258] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eef20be5559e43d5bd0ac1db2b16c7e3 [ 1463.667765] env[62740]: DEBUG nova.compute.manager [None req-195f8702-2a8b-4af1-beac-c4bffc17d311 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: 58ae8579-4ea3-45ba-a982-10e0ca82874c] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1463.669728] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-195f8702-2a8b-4af1-beac-c4bffc17d311 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Expecting reply to msg 203f18914f1f44df8de0efc6b4ca4aa8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1463.674776] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d911728a-1739-414f-8329-25fd6f029505 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.693166] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 203f18914f1f44df8de0efc6b4ca4aa8 [ 1463.693735] env[62740]: DEBUG nova.compute.manager [None req-195f8702-2a8b-4af1-beac-c4bffc17d311 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] [instance: 58ae8579-4ea3-45ba-a982-10e0ca82874c] Instance disappeared before build. 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1463.694100] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-195f8702-2a8b-4af1-beac-c4bffc17d311 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Expecting reply to msg 9f4b39e42a2f44139ca40f3693baf0ac in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1463.703567] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-754f5765-fbfb-4df7-86b1-0454a446d0dc tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7aacf4e0-b508-4a18-909a-3d1fe9458d98 could not be found. [ 1463.703775] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-754f5765-fbfb-4df7-86b1-0454a446d0dc tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1463.704016] env[62740]: INFO nova.compute.manager [None req-754f5765-fbfb-4df7-86b1-0454a446d0dc tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1463.704290] env[62740]: DEBUG oslo.service.loopingcall [None req-754f5765-fbfb-4df7-86b1-0454a446d0dc tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1463.704808] env[62740]: DEBUG nova.compute.manager [-] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1463.704867] env[62740]: DEBUG nova.network.neutron [-] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1463.708390] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f4b39e42a2f44139ca40f3693baf0ac [ 1463.718294] env[62740]: DEBUG oslo_concurrency.lockutils [None req-195f8702-2a8b-4af1-beac-c4bffc17d311 tempest-AttachVolumeShelveTestJSON-734905 tempest-AttachVolumeShelveTestJSON-734905-project-member] Lock "58ae8579-4ea3-45ba-a982-10e0ca82874c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 211.240s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1463.718832] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 44a552c4b0c94f08950c52cf19ea2445 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1463.726894] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 44a552c4b0c94f08950c52cf19ea2445 [ 1463.727332] env[62740]: DEBUG nova.compute.manager [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 
tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1463.729143] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 55fece7a91cc482d9dfce3bd4a0c77a9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1463.760104] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 55fece7a91cc482d9dfce3bd4a0c77a9 [ 1463.775010] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1463.775373] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1463.780466] env[62740]: INFO nova.compute.claims [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1463.782375] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 3b14f72b6b784c4b851cc6013070f211 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1463.817893] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3b14f72b6b784c4b851cc6013070f211 [ 1463.819775] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg ade1e57e98c4425d8f863128f05254c6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1463.827617] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ade1e57e98c4425d8f863128f05254c6 [ 1463.853902] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 562f020c2e404d988ec2e03d949823e6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1463.862608] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 562f020c2e404d988ec2e03d949823e6 [ 1463.862761] env[62740]: DEBUG nova.network.neutron [-] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1463.863259] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 5958d2b22dde4524800e6943e261c260 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1463.872031] env[62740]: INFO oslo_messaging._drivers.amqpdriver 
[-] Received RPC response for msg 5958d2b22dde4524800e6943e261c260 [ 1463.872639] env[62740]: INFO nova.compute.manager [-] [instance: 7aacf4e0-b508-4a18-909a-3d1fe9458d98] Took 0.17 seconds to deallocate network for instance. [ 1463.876042] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-754f5765-fbfb-4df7-86b1-0454a446d0dc tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Expecting reply to msg dace578871ae4b14879aec0de51fb8be in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1463.912341] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dace578871ae4b14879aec0de51fb8be [ 1463.927969] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-754f5765-fbfb-4df7-86b1-0454a446d0dc tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Expecting reply to msg 4dc657a46c1b4aba984261b26ac69019 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1463.981960] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4dc657a46c1b4aba984261b26ac69019 [ 1463.986020] env[62740]: DEBUG oslo_concurrency.lockutils [None req-754f5765-fbfb-4df7-86b1-0454a446d0dc tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Lock "7aacf4e0-b508-4a18-909a-3d1fe9458d98" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.328s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1463.986020] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-754f5765-fbfb-4df7-86b1-0454a446d0dc tempest-AttachVolumeTestJSON-2026405224 tempest-AttachVolumeTestJSON-2026405224-project-member] Expecting reply to msg 06e4e6ae309b48b8a299ad584ed9fcbd in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1463.998668] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 06e4e6ae309b48b8a299ad584ed9fcbd [ 1464.041944] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e14fa77c-6456-4506-af9f-38d0c783496f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.052572] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65c7b67d-b2c0-4875-af73-497f339a328b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.082370] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8d628aa-f26a-4312-8ca9-9cb86f38baab {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.089573] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98f28ed2-cf14-4cb8-be21-8c0db201abca {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.102778] env[62740]: DEBUG nova.compute.provider_tree [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1464.103310] env[62740]: INFO 
oslo_messaging._drivers.amqpdriver [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg a66db72373534acdaa83d17cbfdde8f3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1464.111127] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a66db72373534acdaa83d17cbfdde8f3 [ 1464.111829] env[62740]: DEBUG nova.scheduler.client.report [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1464.114472] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg a96214795f944827b503d8870d284dd8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1464.128790] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a96214795f944827b503d8870d284dd8 [ 1464.129053] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.354s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1464.129534] env[62740]: DEBUG nova.compute.manager [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Start building networks asynchronously for instance. 
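
Editor's note: the inventory blob above is the placement view of this node. Usable capacity per resource class is (total - reserved) * allocation_ratio, while max_unit caps what a single instance may request. Worked out for the values logged:

    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 90},
    }

    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(f"{rc}: schedulable={capacity:g}, per-instance cap={inv['max_unit']}")

    # VCPU: schedulable=192, per-instance cap=16
    # MEMORY_MB: schedulable=196078, per-instance cap=65530
    # DISK_GB: schedulable=400, per-instance cap=90
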
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1464.131484] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg e2cf470836364926af09c8ae07a66d5d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1464.164251] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2cf470836364926af09c8ae07a66d5d [ 1464.167258] env[62740]: DEBUG nova.compute.utils [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1464.167878] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 9e805660a4694272a85564d3550d2b73 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1464.169783] env[62740]: DEBUG nova.compute.manager [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1464.169955] env[62740]: DEBUG nova.network.neutron [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1464.177888] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9e805660a4694272a85564d3550d2b73 [ 1464.178432] env[62740]: DEBUG nova.compute.manager [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Start building block device mappings for instance. 
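
Editor's note: "Using /dev/sd instead of None" above is nova.compute.utils.get_next_device_name picking a disk device name because the request did not specify one. A deliberately simplified sketch of the idea (the real code also accounts for swap, ephemerals, config drive, and per-hypervisor prefixes):

    import re
    import string

    def next_device_name(used, prefix="/dev/sd"):
        # Strip partition digits, then take the first free letter.
        taken = {re.sub(r"\d+$", "", d) for d in used}
        for letter in string.ascii_lowercase:
            candidate = prefix + letter
            if candidate not in taken:
                return candidate
        raise ValueError("out of device names")

    print(next_device_name([]))                         # /dev/sda
    print(next_device_name(["/dev/sda", "/dev/sdb1"]))  # /dev/sdc
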
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1464.180274] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg e35db007c0074c3ea7eb122afc4ed74c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1464.210688] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e35db007c0074c3ea7eb122afc4ed74c [ 1464.214050] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg c8cc4b42ce7441bfbc8a45633b90400d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1464.246748] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c8cc4b42ce7441bfbc8a45633b90400d [ 1464.247995] env[62740]: DEBUG nova.compute.manager [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Start spawning the instance on the hypervisor. {{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1464.261446] env[62740]: DEBUG nova.policy [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd69d6db778f64160881e1dfebfd4ed7f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ce4469c0ef4e4e42bb30cd2f947294f3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 1464.277116] env[62740]: DEBUG nova.virt.hardware [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1464.277352] env[62740]: DEBUG nova.virt.hardware [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1464.277508] env[62740]: DEBUG 
nova.virt.hardware [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1464.277695] env[62740]: DEBUG nova.virt.hardware [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1464.277838] env[62740]: DEBUG nova.virt.hardware [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1464.277984] env[62740]: DEBUG nova.virt.hardware [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1464.278424] env[62740]: DEBUG nova.virt.hardware [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1464.278507] env[62740]: DEBUG nova.virt.hardware [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1464.278734] env[62740]: DEBUG nova.virt.hardware [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1464.278942] env[62740]: DEBUG nova.virt.hardware [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1464.279171] env[62740]: DEBUG nova.virt.hardware [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1464.280089] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-045781eb-4758-4f46-a5c7-7f1707e8ce5e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.288557] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4efad264-ad53-4625-af2a-79dc9150c21c {{(pid=62740) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.619382] env[62740]: DEBUG nova.network.neutron [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Successfully created port: da4649fe-c518-40a5-b93c-040911b8f3b8 {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1465.306430] env[62740]: DEBUG nova.network.neutron [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Successfully updated port: da4649fe-c518-40a5-b93c-040911b8f3b8 {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1465.306430] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 88b47e7759254b75a8c81c42436a1262 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1465.315968] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 88b47e7759254b75a8c81c42436a1262 [ 1465.316672] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Acquiring lock "refresh_cache-c0daf074-eecb-4899-938f-477031efc6d1" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1465.316821] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Acquired lock "refresh_cache-c0daf074-eecb-4899-938f-477031efc6d1" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1465.316970] env[62740]: DEBUG nova.network.neutron [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1465.317376] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 5946ced410984c36835af773b1625d33 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1465.324863] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5946ced410984c36835af773b1625d33 [ 1465.390326] env[62740]: DEBUG nova.network.neutron [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Instance cache missing network info. 
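
Editor's note: the nova.virt.hardware records a few lines up enumerate CPU topologies: with no flavor or image limits the bounds default to 65536, and a 1-vCPU guest admits exactly one topology, 1 socket x 1 core x 1 thread. A toy enumerator in that spirit (not Nova's actual implementation, which also applies preferences and sorting):

    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # Enumerate (sockets, cores, threads) triples whose product is the
        # vCPU count, as in "Build topologies for 1 vcpu(s) 1:1:1" above.
        found = []
        for s, c, t in product(range(1, min(max_sockets, vcpus) + 1),
                               range(1, min(max_cores, vcpus) + 1),
                               range(1, min(max_threads, vcpus) + 1)):
            if s * c * t == vcpus:
                found.append((s, c, t))
        return found

    print(possible_topologies(1))  # [(1, 1, 1)] -- matching the log above
    print(possible_topologies(4))  # (1, 2, 2), (2, 2, 1), (4, 1, 1), ...
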
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1465.551875] env[62740]: DEBUG nova.compute.manager [req-ad6c808f-fef3-4df3-a276-432cfca91eb5 req-fac10ba5-51ed-421f-96dc-5075170a5567 service nova] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Received event network-vif-plugged-da4649fe-c518-40a5-b93c-040911b8f3b8 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1465.552120] env[62740]: DEBUG oslo_concurrency.lockutils [req-ad6c808f-fef3-4df3-a276-432cfca91eb5 req-fac10ba5-51ed-421f-96dc-5075170a5567 service nova] Acquiring lock "c0daf074-eecb-4899-938f-477031efc6d1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1465.552335] env[62740]: DEBUG oslo_concurrency.lockutils [req-ad6c808f-fef3-4df3-a276-432cfca91eb5 req-fac10ba5-51ed-421f-96dc-5075170a5567 service nova] Lock "c0daf074-eecb-4899-938f-477031efc6d1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1465.552505] env[62740]: DEBUG oslo_concurrency.lockutils [req-ad6c808f-fef3-4df3-a276-432cfca91eb5 req-fac10ba5-51ed-421f-96dc-5075170a5567 service nova] Lock "c0daf074-eecb-4899-938f-477031efc6d1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1465.554177] env[62740]: DEBUG nova.compute.manager [req-ad6c808f-fef3-4df3-a276-432cfca91eb5 req-fac10ba5-51ed-421f-96dc-5075170a5567 service nova] [instance: c0daf074-eecb-4899-938f-477031efc6d1] No waiting events found dispatching network-vif-plugged-da4649fe-c518-40a5-b93c-040911b8f3b8 {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1465.554177] env[62740]: WARNING nova.compute.manager [req-ad6c808f-fef3-4df3-a276-432cfca91eb5 req-fac10ba5-51ed-421f-96dc-5075170a5567 service nova] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Received unexpected event network-vif-plugged-da4649fe-c518-40a5-b93c-040911b8f3b8 for instance with vm_state building and task_state spawning. [ 1465.554177] env[62740]: DEBUG nova.compute.manager [req-ad6c808f-fef3-4df3-a276-432cfca91eb5 req-fac10ba5-51ed-421f-96dc-5075170a5567 service nova] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Received event network-changed-da4649fe-c518-40a5-b93c-040911b8f3b8 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1465.554177] env[62740]: DEBUG nova.compute.manager [req-ad6c808f-fef3-4df3-a276-432cfca91eb5 req-fac10ba5-51ed-421f-96dc-5075170a5567 service nova] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Refreshing instance network info cache due to event network-changed-da4649fe-c518-40a5-b93c-040911b8f3b8. 
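
Editor's note: the WARNING just above ("Received unexpected event network-vif-plugged-...") is benign here: Neutron delivered the vif-plugged event before the compute side had registered a waiter for it, so pop_instance_event found nothing to dispatch. A sketch of that waiter/dispatch pattern (class and method names are illustrative, not Nova's API):

    import threading

    class InstanceEvents:
        def __init__(self):
            self._waiters = {}  # (instance_uuid, event_name) -> threading.Event
            self._lock = threading.Lock()

        def prepare(self, instance_uuid, event_name):
            # A spawning thread registers interest before it blocks.
            ev = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = ev
            return ev

        def pop_event(self, instance_uuid, event_name):
            # An external event wakes the waiter, or is reported unexpected.
            with self._lock:
                ev = self._waiters.pop((instance_uuid, event_name), None)
            if ev is None:
                print(f"WARNING: unexpected event {event_name} for {instance_uuid}")
            else:
                ev.set()

    events = InstanceEvents()
    # No one is waiting yet, so this mirrors the WARNING in the log above:
    events.pop_event("c0daf074-eecb-4899-938f-477031efc6d1",
                     "network-vif-plugged-da4649fe-c518-40a5-b93c-040911b8f3b8")
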
{{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1465.554177] env[62740]: DEBUG oslo_concurrency.lockutils [req-ad6c808f-fef3-4df3-a276-432cfca91eb5 req-fac10ba5-51ed-421f-96dc-5075170a5567 service nova] Acquiring lock "refresh_cache-c0daf074-eecb-4899-938f-477031efc6d1" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1465.608431] env[62740]: DEBUG nova.network.neutron [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Updating instance_info_cache with network_info: [{"id": "da4649fe-c518-40a5-b93c-040911b8f3b8", "address": "fa:16:3e:7d:e3:8e", "network": {"id": "c7681252-9fbe-485e-ab67-59da6e6d7279", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1927212820-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce4469c0ef4e4e42bb30cd2f947294f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda4649fe-c5", "ovs_interfaceid": "da4649fe-c518-40a5-b93c-040911b8f3b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1465.609033] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg f2c1da15baeb49a7b8e94d5c73d6a35c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1465.618566] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f2c1da15baeb49a7b8e94d5c73d6a35c [ 1465.619110] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Releasing lock "refresh_cache-c0daf074-eecb-4899-938f-477031efc6d1" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1465.619381] env[62740]: DEBUG nova.compute.manager [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Instance network_info: |[{"id": "da4649fe-c518-40a5-b93c-040911b8f3b8", "address": "fa:16:3e:7d:e3:8e", "network": {"id": "c7681252-9fbe-485e-ab67-59da6e6d7279", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1927212820-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce4469c0ef4e4e42bb30cd2f947294f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda4649fe-c5", "ovs_interfaceid": "da4649fe-c518-40a5-b93c-040911b8f3b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1465.619924] env[62740]: DEBUG oslo_concurrency.lockutils [req-ad6c808f-fef3-4df3-a276-432cfca91eb5 req-fac10ba5-51ed-421f-96dc-5075170a5567 service nova] Acquired lock "refresh_cache-c0daf074-eecb-4899-938f-477031efc6d1" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1465.620121] env[62740]: DEBUG nova.network.neutron [req-ad6c808f-fef3-4df3-a276-432cfca91eb5 req-fac10ba5-51ed-421f-96dc-5075170a5567 service nova] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Refreshing network info cache for port da4649fe-c518-40a5-b93c-040911b8f3b8 {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1465.620526] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-ad6c808f-fef3-4df3-a276-432cfca91eb5 req-fac10ba5-51ed-421f-96dc-5075170a5567 service nova] Expecting reply to msg 53a5be68fe2d4e8e9884bac722b66830 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1465.621296] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7d:e3:8e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a0a76279-3c11-4bef-b124-2a2ee13fa377', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'da4649fe-c518-40a5-b93c-040911b8f3b8', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1465.629600] env[62740]: DEBUG oslo.service.loopingcall [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1465.630156] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 53a5be68fe2d4e8e9884bac722b66830 [ 1465.630493] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1465.632730] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ec8e5a98-1cea-4b55-b8c9-985eb23da8f5 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.653183] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1465.653183] env[62740]: value = "task-640286" [ 1465.653183] env[62740]: _type = "Task" [ 1465.653183] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.879140] env[62740]: DEBUG nova.network.neutron [req-ad6c808f-fef3-4df3-a276-432cfca91eb5 req-fac10ba5-51ed-421f-96dc-5075170a5567 service nova] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Updated VIF entry in instance network info cache for port da4649fe-c518-40a5-b93c-040911b8f3b8. {{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1465.879558] env[62740]: DEBUG nova.network.neutron [req-ad6c808f-fef3-4df3-a276-432cfca91eb5 req-fac10ba5-51ed-421f-96dc-5075170a5567 service nova] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Updating instance_info_cache with network_info: [{"id": "da4649fe-c518-40a5-b93c-040911b8f3b8", "address": "fa:16:3e:7d:e3:8e", "network": {"id": "c7681252-9fbe-485e-ab67-59da6e6d7279", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1927212820-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce4469c0ef4e4e42bb30cd2f947294f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda4649fe-c5", "ovs_interfaceid": "da4649fe-c518-40a5-b93c-040911b8f3b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1465.880211] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-ad6c808f-fef3-4df3-a276-432cfca91eb5 req-fac10ba5-51ed-421f-96dc-5075170a5567 service nova] Expecting reply to msg 887fab8ca1bc4c508c20751e1300d4f7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1465.888745] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 887fab8ca1bc4c508c20751e1300d4f7 [ 1465.889375] env[62740]: DEBUG oslo_concurrency.lockutils [req-ad6c808f-fef3-4df3-a276-432cfca91eb5 req-fac10ba5-51ed-421f-96dc-5075170a5567 service nova] Releasing lock 
"refresh_cache-c0daf074-eecb-4899-938f-477031efc6d1" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1466.163075] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640286, 'name': CreateVM_Task, 'duration_secs': 0.340408} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.163292] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1466.164062] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1466.164340] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1466.164695] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1466.164975] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ff8a106-b929-4f0f-803a-656c061065e2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.169386] env[62740]: DEBUG oslo_vmware.api [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Waiting for the task: (returnval){ [ 1466.169386] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5248249d-452d-b001-62eb-5aead364d922" [ 1466.169386] env[62740]: _type = "Task" [ 1466.169386] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.177777] env[62740]: DEBUG oslo_vmware.api [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5248249d-452d-b001-62eb-5aead364d922, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1466.679898] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1466.680213] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1466.680439] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1467.359415] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-47acb4d3-9ba0-4b0c-a9c7-8b4129eae74d tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 5e4b9975346a4bc39bff462672749731 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1467.371058] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e4b9975346a4bc39bff462672749731
[ 1467.371633] env[62740]: DEBUG oslo_concurrency.lockutils [None req-47acb4d3-9ba0-4b0c-a9c7-8b4129eae74d tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Acquiring lock "c0daf074-eecb-4899-938f-477031efc6d1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1476.554699] env[62740]: WARNING oslo_vmware.rw_handles [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 1476.554699] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 1476.554699] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 1476.554699] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 1476.554699] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 1476.554699] env[62740]: ERROR oslo_vmware.rw_handles response.begin()
[ 1476.554699] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 1476.554699] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 1476.554699] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 1476.554699] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 1476.554699] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 1476.554699] env[62740]: ERROR oslo_vmware.rw_handles
[ 1476.555502] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/9ac10fef-12ee-4133-b02a-9592ba7ebc0f/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore1 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 1476.557768] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 1476.558095] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Copying Virtual Disk [datastore1] vmware_temp/9ac10fef-12ee-4133-b02a-9592ba7ebc0f/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore1] vmware_temp/9ac10fef-12ee-4133-b02a-9592ba7ebc0f/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 1476.558425] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-45a1ae70-46ba-4ea0-8ff6-a7f9271879b2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1476.566916] env[62740]: DEBUG oslo_vmware.api [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Waiting for the task: (returnval){
[ 1476.566916] env[62740]: value = "task-640287"
[ 1476.566916] env[62740]: _type = "Task"
[ 1476.566916] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1476.574823] env[62740]: DEBUG oslo_vmware.api [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Task: {'id': task-640287, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1477.076862] env[62740]: DEBUG oslo_vmware.exceptions [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Fault InvalidArgument not matched.
{{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 1477.077153] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Releasing lock "[datastore1] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1477.077753] env[62740]: ERROR nova.compute.manager [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1477.077753] env[62740]: Faults: ['InvalidArgument']
[ 1477.077753] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] Traceback (most recent call last):
[ 1477.077753] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 1477.077753] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] yield resources
[ 1477.077753] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1477.077753] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] self.driver.spawn(context, instance, image_meta,
[ 1477.077753] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1477.077753] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1477.077753] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1477.077753] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] self._fetch_image_if_missing(context, vi)
[ 1477.077753] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1477.078135] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] image_cache(vi, tmp_image_ds_loc)
[ 1477.078135] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1477.078135] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] vm_util.copy_virtual_disk(
[ 1477.078135] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1477.078135] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] session._wait_for_task(vmdk_copy_task)
[ 1477.078135] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1477.078135] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] return self.wait_for_task(task_ref)
[ 1477.078135] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1477.078135] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] return evt.wait()
[ 1477.078135] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1477.078135] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] result = hub.switch()
[ 1477.078135] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1477.078135] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] return self.greenlet.switch()
[ 1477.078575] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1477.078575] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] self.f(*self.args, **self.kw)
[ 1477.078575] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1477.078575] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] raise exceptions.translate_fault(task_info.error)
[ 1477.078575] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1477.078575] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] Faults: ['InvalidArgument']
[ 1477.078575] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b]
[ 1477.078575] env[62740]: INFO nova.compute.manager [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Terminating instance
[ 1477.080950] env[62740]: DEBUG nova.compute.manager [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Start destroying the instance on the hypervisor.
{{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1477.081170] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1477.081959] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1760f9b1-7659-4951-91c7-fdc19e161368 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.088635] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1477.088858] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-30fc5fbc-33ca-473f-9ef3-24587ab167d3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.152231] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1477.152482] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Deleting contents of the VM from datastore datastore1 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1477.152640] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Deleting the datastore file [datastore1] 158406db-7196-4826-aefa-20a58daa186b {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1477.153045] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f8822e23-8f60-403b-a197-d1fd59ef4b2b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.159231] env[62740]: DEBUG oslo_vmware.api [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Waiting for the task: (returnval){ [ 1477.159231] env[62740]: value = "task-640289" [ 1477.159231] env[62740]: _type = "Task" [ 1477.159231] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.166667] env[62740]: DEBUG oslo_vmware.api [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Task: {'id': task-640289, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.669239] env[62740]: DEBUG oslo_vmware.api [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Task: {'id': task-640289, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.065836} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.669520] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1477.669713] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Deleted contents of the VM from datastore datastore1 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1477.669889] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1477.670082] env[62740]: INFO nova.compute.manager [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Took 0.59 seconds to destroy the instance on the hypervisor. 
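
A note on the failure mode above: the CopyVirtualDisk_Task fails server-side in vCenter, so nova only learns of the error when oslo.vmware polls the task and translates the recorded fault name ('InvalidArgument', on the fileType parameter) into a VimFaultException. That is why the traceback in this section ends in _poll_task raising exceptions.translate_fault(task_info.error) rather than in the copy call itself. Below is a minimal, self-contained sketch of that poll-and-translate pattern; TaskInfo, wait_for_task and VimFaultError here are illustrative stand-ins, not oslo.vmware's real classes.

    import time
    from dataclasses import dataclass

    class VimFaultError(Exception):
        """Stand-in for a translated vCenter task fault."""

    @dataclass
    class TaskInfo:
        state: str                 # 'running', 'success' or 'error'
        error: str | None = None   # fault name, e.g. 'InvalidArgument'

    def wait_for_task(poll, interval=0.5):
        """Poll poll() until the task reaches a terminal state."""
        while True:
            info = poll()
            if info.state == "success":
                return info
            if info.state == "error":
                # the client never sees the server-side failure directly;
                # it is reconstructed from the task's recorded fault
                raise VimFaultError(f"Faults: [{info.error!r}]")
            time.sleep(interval)

    # toy task that fails the same way task-640287 does above
    states = iter([TaskInfo("running"), TaskInfo("error", "InvalidArgument")])
    try:
        wait_for_task(lambda: next(states), interval=0)
    except VimFaultError as exc:
        print(exc)   # Faults: ['InvalidArgument']

The task value ("task-640287"), the "progress is 0%" polling records, and the Faults list in the log all map onto this loop.
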
[ 1477.672291] env[62740]: DEBUG nova.compute.claims [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1477.672472] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1477.672687] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1477.674703] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Expecting reply to msg 9fa4d1fc186348f39ab472aa7400cca4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1477.713042] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9fa4d1fc186348f39ab472aa7400cca4 [ 1477.882161] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d10843b-2f12-4253-bf76-24e831480433 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.889608] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-598b6450-9b98-4538-aed0-dff6c9a85cf1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.920607] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-017dcb7b-33d6-48a3-b626-a9163e6eeeb8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.927519] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95c74954-3d83-486d-a06b-f0fadda30492 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.940170] env[62740]: DEBUG nova.compute.provider_tree [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1477.940690] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Expecting reply to msg 3f879f00eb6949c79e3c5b2f9e01b8ee in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1477.948166] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
3f879f00eb6949c79e3c5b2f9e01b8ee [ 1477.949101] env[62740]: DEBUG nova.scheduler.client.report [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1477.951345] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Expecting reply to msg 50047a5ea480403fa35d5448da3bbaef in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1477.964468] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 50047a5ea480403fa35d5448da3bbaef [ 1477.965172] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.292s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1477.965694] env[62740]: ERROR nova.compute.manager [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1477.965694] env[62740]: Faults: ['InvalidArgument'] [ 1477.965694] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] Traceback (most recent call last): [ 1477.965694] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1477.965694] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] self.driver.spawn(context, instance, image_meta, [ 1477.965694] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1477.965694] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1477.965694] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1477.965694] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] self._fetch_image_if_missing(context, vi) [ 1477.965694] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1477.965694] env[62740]: ERROR nova.compute.manager [instance: 
158406db-7196-4826-aefa-20a58daa186b] image_cache(vi, tmp_image_ds_loc) [ 1477.965694] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1477.966051] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] vm_util.copy_virtual_disk( [ 1477.966051] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1477.966051] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] session._wait_for_task(vmdk_copy_task) [ 1477.966051] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1477.966051] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] return self.wait_for_task(task_ref) [ 1477.966051] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1477.966051] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] return evt.wait() [ 1477.966051] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1477.966051] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] result = hub.switch() [ 1477.966051] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1477.966051] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] return self.greenlet.switch() [ 1477.966051] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1477.966051] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] self.f(*self.args, **self.kw) [ 1477.966538] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1477.966538] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] raise exceptions.translate_fault(task_info.error) [ 1477.966538] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1477.966538] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] Faults: ['InvalidArgument'] [ 1477.966538] env[62740]: ERROR nova.compute.manager [instance: 158406db-7196-4826-aefa-20a58daa186b] [ 1477.966538] env[62740]: DEBUG nova.compute.utils [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1477.967844] env[62740]: DEBUG 
nova.compute.manager [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Build of instance 158406db-7196-4826-aefa-20a58daa186b was re-scheduled: A specified parameter was not correct: fileType [ 1477.967844] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1477.968262] env[62740]: DEBUG nova.compute.manager [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1477.968441] env[62740]: DEBUG nova.compute.manager [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1477.968644] env[62740]: DEBUG nova.compute.manager [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1477.968821] env[62740]: DEBUG nova.network.neutron [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1478.308756] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Expecting reply to msg 258d4e3480a9484892e1c8d6e0c166df in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1478.322323] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 258d4e3480a9484892e1c8d6e0c166df [ 1478.322323] env[62740]: DEBUG nova.network.neutron [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1478.322323] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Expecting reply to msg de42c1d9ce9341239c7ad7c190c58468 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1478.335087] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg de42c1d9ce9341239c7ad7c190c58468 [ 1478.336128] env[62740]: INFO nova.compute.manager [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 
158406db-7196-4826-aefa-20a58daa186b] Took 0.37 seconds to deallocate network for instance. [ 1478.338049] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Expecting reply to msg 6b1ecd244e354a5d8effcf09e5935930 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1478.375865] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6b1ecd244e354a5d8effcf09e5935930 [ 1478.378721] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Expecting reply to msg 00aaf18cb75442a1af6468d81549737e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1478.416052] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 00aaf18cb75442a1af6468d81549737e [ 1478.436126] env[62740]: INFO nova.scheduler.client.report [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Deleted allocations for instance 158406db-7196-4826-aefa-20a58daa186b [ 1478.442670] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Expecting reply to msg d3f11cce8aeb467896713607bdd1aae9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1478.456082] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d3f11cce8aeb467896713607bdd1aae9 [ 1478.456648] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3b129c14-753f-4d56-8317-f0704a0fae66 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Lock "158406db-7196-4826-aefa-20a58daa186b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 576.616s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1478.457335] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Expecting reply to msg ddba7deb366849e4a5deb0a0c9c9fafa in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1478.458277] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ee045319-c6bf-48ea-b314-d3958f7c5617 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Lock "158406db-7196-4826-aefa-20a58daa186b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 380.338s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1478.458377] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ee045319-c6bf-48ea-b314-d3958f7c5617 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Acquiring lock "158406db-7196-4826-aefa-20a58daa186b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1478.458636] env[62740]: DEBUG oslo_concurrency.lockutils [None 
req-ee045319-c6bf-48ea-b314-d3958f7c5617 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Lock "158406db-7196-4826-aefa-20a58daa186b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1478.458788] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ee045319-c6bf-48ea-b314-d3958f7c5617 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Lock "158406db-7196-4826-aefa-20a58daa186b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1478.461981] env[62740]: INFO nova.compute.manager [None req-ee045319-c6bf-48ea-b314-d3958f7c5617 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Terminating instance [ 1478.463036] env[62740]: DEBUG nova.compute.manager [None req-ee045319-c6bf-48ea-b314-d3958f7c5617 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1478.463246] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-ee045319-c6bf-48ea-b314-d3958f7c5617 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1478.463834] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-75f35b85-b32b-4aad-b2f6-5c46b0c6b8b9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.473198] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95676e6f-a1ee-4bad-9c47-7f6982bbe6de {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.483895] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ddba7deb366849e4a5deb0a0c9c9fafa [ 1478.484476] env[62740]: DEBUG nova.compute.manager [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Starting instance... 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1478.486046] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Expecting reply to msg 116fed2c931d4f13a67544085ce78b28 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1478.503783] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-ee045319-c6bf-48ea-b314-d3958f7c5617 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 158406db-7196-4826-aefa-20a58daa186b could not be found. [ 1478.503981] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-ee045319-c6bf-48ea-b314-d3958f7c5617 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1478.504187] env[62740]: INFO nova.compute.manager [None req-ee045319-c6bf-48ea-b314-d3958f7c5617 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] [instance: 158406db-7196-4826-aefa-20a58daa186b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1478.504427] env[62740]: DEBUG oslo.service.loopingcall [None req-ee045319-c6bf-48ea-b314-d3958f7c5617 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1478.504645] env[62740]: DEBUG nova.compute.manager [-] [instance: 158406db-7196-4826-aefa-20a58daa186b] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1478.504744] env[62740]: DEBUG nova.network.neutron [-] [instance: 158406db-7196-4826-aefa-20a58daa186b] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1478.522395] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg d698b93469554fa694c1cb7a93b29adf in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1478.528980] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 116fed2c931d4f13a67544085ce78b28 [ 1478.532569] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d698b93469554fa694c1cb7a93b29adf [ 1478.534760] env[62740]: DEBUG nova.network.neutron [-] [instance: 158406db-7196-4826-aefa-20a58daa186b] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1478.535121] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg aa02e9435cef4348a134486a51eee68f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1478.543250] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa02e9435cef4348a134486a51eee68f [ 1478.544077] env[62740]: INFO nova.compute.manager [-] [instance: 158406db-7196-4826-aefa-20a58daa186b] Took 0.04 seconds to deallocate network for instance. 
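
Worth noting in the terminate sequence just above: the failed spawn's teardown already unregistered the VM, so the user-initiated terminate finds nothing on the backend (SearchIndex.FindAllByUuid comes back empty), vmops raises nova.exception.InstanceNotFound, and the manager logs a warning but still reports "Instance destroyed" so that network deallocation proceeds. A small self-contained sketch of that idempotent-destroy shape follows; Backend, destroy_instance and this InstanceNotFound class are hypothetical stand-ins for illustration only.

    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""

    class Backend:
        """Toy hypervisor backend holding registered VM uuids."""
        def __init__(self, existing=()):
            self._vms = set(existing)

        def unregister(self, uuid):
            if uuid not in self._vms:
                raise InstanceNotFound(uuid)
            self._vms.remove(uuid)

    def destroy_instance(backend, uuid, log=print):
        """Destroy uuid, treating a missing backend VM as already gone."""
        try:
            backend.unregister(uuid)
        except InstanceNotFound:
            # already cleaned up (here, by the failed spawn's teardown);
            # destroy must stay idempotent so the rest of the cleanup runs
            log(f"WARNING: instance {uuid} does not exist on backend")
        log(f"Instance {uuid} destroyed")
        # network deallocation would follow here in either case

    destroy_instance(Backend(), "158406db-7196-4826-aefa-20a58daa186b")

This idempotence is why the records show a 0.04 second "destroy" immediately followed by a normal deallocate_for_instance() that updates the instance_info_cache with an empty network_info.
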
[ 1478.550202] env[62740]: DEBUG oslo_concurrency.lockutils [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1478.550533] env[62740]: DEBUG oslo_concurrency.lockutils [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1478.552642] env[62740]: INFO nova.compute.claims [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1478.555158] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Expecting reply to msg 89c7cc1213c04267930bc83dae743d1e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1478.559010] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ee045319-c6bf-48ea-b314-d3958f7c5617 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Expecting reply to msg 55e4f0a17ff3496c80bc7de582479e69 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1478.586036] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 55e4f0a17ff3496c80bc7de582479e69 [ 1478.589182] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 89c7cc1213c04267930bc83dae743d1e [ 1478.591192] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Expecting reply to msg ab60f1c2a85d43838390e307f0a3b67d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1478.599310] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ab60f1c2a85d43838390e307f0a3b67d [ 1478.601020] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ee045319-c6bf-48ea-b314-d3958f7c5617 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Expecting reply to msg 4cce59e2bb654570b3a64ee379352d36 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1478.642188] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4cce59e2bb654570b3a64ee379352d36 [ 1478.647661] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ee045319-c6bf-48ea-b314-d3958f7c5617 tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Lock "158406db-7196-4826-aefa-20a58daa186b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.190s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1478.648009] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ee045319-c6bf-48ea-b314-d3958f7c5617 
tempest-ServersTestFqdnHostnames-1264709151 tempest-ServersTestFqdnHostnames-1264709151-project-member] Expecting reply to msg 7c3f2db034934625b377fc46c628fd3c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1478.649582] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "158406db-7196-4826-aefa-20a58daa186b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 296.390s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1478.649582] env[62740]: INFO nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 158406db-7196-4826-aefa-20a58daa186b] During sync_power_state the instance has a pending task (deleting). Skip. [ 1478.649582] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "158406db-7196-4826-aefa-20a58daa186b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1478.657736] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c3f2db034934625b377fc46c628fd3c [ 1478.769346] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f24aff4c-1c22-4b8e-9da9-8c7647b7a468 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.777522] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf59610c-cd2d-4e1c-9e00-8df238b26d53 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.806560] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e10550d-a763-4467-b859-9276e427d696 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.813492] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f27ca77-bb63-4c9d-bfa2-e8b49e87626c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.826243] env[62740]: DEBUG nova.compute.provider_tree [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1478.826734] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Expecting reply to msg 554cc221e9754905a0d0849019a3e129 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1478.834655] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 554cc221e9754905a0d0849019a3e129 [ 1478.835565] env[62740]: DEBUG nova.scheduler.client.report [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 
based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1478.837865] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Expecting reply to msg ac6ff68ce2bc44bc8f8586691c171ccb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1478.849351] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac6ff68ce2bc44bc8f8586691c171ccb [ 1478.850092] env[62740]: DEBUG oslo_concurrency.lockutils [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.300s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1478.850545] env[62740]: DEBUG nova.compute.manager [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Start building networks asynchronously for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1478.852127] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Expecting reply to msg 7aa464f6965442afa99dcc76925bad0b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1478.882211] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7aa464f6965442afa99dcc76925bad0b [ 1478.883744] env[62740]: DEBUG nova.compute.utils [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1478.884337] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Expecting reply to msg f56b9a73bee2435683013d9dcfc766a7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1478.885467] env[62740]: DEBUG nova.compute.manager [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Not allocating networking since 'none' was specified. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 1478.896806] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f56b9a73bee2435683013d9dcfc766a7 [ 1478.897320] env[62740]: DEBUG nova.compute.manager [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1478.898894] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Expecting reply to msg acdb5035b461428d8039bdf52c23213f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1478.925497] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg acdb5035b461428d8039bdf52c23213f [ 1478.928039] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Expecting reply to msg abe60a5e0f534ceab54715d9cd2833b2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1478.959756] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg abe60a5e0f534ceab54715d9cd2833b2 [ 1478.961101] env[62740]: DEBUG nova.compute.manager [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Start spawning the instance on the hypervisor. {{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1478.986269] env[62740]: DEBUG nova.virt.hardware [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1478.986518] env[62740]: DEBUG nova.virt.hardware [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1478.986672] env[62740]: DEBUG nova.virt.hardware [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1478.986850] env[62740]: DEBUG nova.virt.hardware [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1478.986995] env[62740]: DEBUG nova.virt.hardware [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1478.987155] env[62740]: DEBUG nova.virt.hardware [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1478.987359] env[62740]: DEBUG nova.virt.hardware [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1478.987515] env[62740]: DEBUG nova.virt.hardware [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1478.987683] env[62740]: DEBUG nova.virt.hardware [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1478.987846] env[62740]: DEBUG nova.virt.hardware [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1478.988033] env[62740]: DEBUG nova.virt.hardware [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1478.988952] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65c7b4ef-7e7e-4ebc-a849-21c6c21c55b6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.996756] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a20a2570-6ef8-4d64-a496-a4991dfb5efe {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.010366] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Instance VIF info [] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1479.015739] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Creating folder: Project (0bf03fb76a93466e80c0159a1eaabb74). Parent ref: group-v156037. 
{{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1479.016015] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-680fe9d1-eca6-4bfb-8c6a-1a8349518630 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.025812] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Created folder: Project (0bf03fb76a93466e80c0159a1eaabb74) in parent group-v156037. [ 1479.025991] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Creating folder: Instances. Parent ref: group-v156162. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1479.026228] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1c8feaae-4945-4c09-9058-af9432478306 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.034399] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Created folder: Instances in parent group-v156162. [ 1479.034619] env[62740]: DEBUG oslo.service.loopingcall [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1479.034797] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1479.034985] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-513f4a19-d8c6-41a7-a69c-699519c02751 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.050175] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1479.050175] env[62740]: value = "task-640292" [ 1479.050175] env[62740]: _type = "Task" [ 1479.050175] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.058039] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640292, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.560836] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640292, 'name': CreateVM_Task, 'duration_secs': 0.251948} completed successfully. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.561153] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1479.561500] env[62740]: DEBUG oslo_concurrency.lockutils [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1479.561683] env[62740]: DEBUG oslo_concurrency.lockutils [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1479.562049] env[62740]: DEBUG oslo_concurrency.lockutils [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1479.562295] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4470f5c-9701-44a3-abea-c1996efa5490 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.567330] env[62740]: DEBUG oslo_vmware.api [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Waiting for the task: (returnval){ [ 1479.567330] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52cefe15-88ac-2c7b-e19f-718b135d7f76" [ 1479.567330] env[62740]: _type = "Task" [ 1479.567330] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.575757] env[62740]: DEBUG oslo_vmware.api [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52cefe15-88ac-2c7b-e19f-718b135d7f76, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.078200] env[62740]: DEBUG oslo_concurrency.lockutils [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1480.078567] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1480.078872] env[62740]: DEBUG oslo_concurrency.lockutils [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1490.181766] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e500201275cc4dd2a9328b571c148680 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1490.191117] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e500201275cc4dd2a9328b571c148680 [ 1493.713929] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-18bf448e-10cb-41b9-94e6-cda5e9de85c7 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Expecting reply to msg 260a3920300a481a9366e0a27365c206 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1493.723407] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 260a3920300a481a9366e0a27365c206 [ 1493.723831] env[62740]: DEBUG oslo_concurrency.lockutils [None req-18bf448e-10cb-41b9-94e6-cda5e9de85c7 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Acquiring lock "61fea037-aac3-47ef-aa6a-5dfa657d840d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1494.886317] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1494.889931] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1494.890111] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Starting heal instance info cache {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 1494.890237] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Rebuilding the list of instances to heal {{(pid=62740) _heal_instance_info_cache 
/opt/stack/nova/nova/compute/manager.py:9921}} [ 1494.890811] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 7e5365ec782c4660ac54af13a3bb3033 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1494.909147] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7e5365ec782c4660ac54af13a3bb3033 [ 1494.911403] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1494.911563] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1494.911698] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1494.911826] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1494.911951] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1494.912086] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1494.912210] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1494.912333] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1494.912447] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1494.912565] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Skipping network cache update for instance because it is Building. 
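Note: the run of "Skipping network cache update" lines above is _heal_instance_info_cache filtering its candidate list: instances still in the building state are excluded because their network info is not yet settled. A sketch of that selection logic, assuming a simplified Instance stand-in rather than Nova's real object model:

    from dataclasses import dataclass

    @dataclass
    class Instance:
        uuid: str
        vm_state: str  # e.g. "building", "active"

    def instances_to_heal(instances):
        # Skip instances that are still building, as the log lines above do;
        # everything else is a candidate for a network info cache refresh.
        return [i for i in instances if i.vm_state != "building"]

    # Example: only "b" survives the filter.
    print(instances_to_heal([Instance("a", "building"), Instance("b", "active")]))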
{{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1494.912686] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Didn't find any instances for network info cache update. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 1494.913177] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1495.890631] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1496.890949] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1496.891266] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1496.891371] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62740) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 1497.891067] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager.update_available_resource {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1497.891067] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 3c12f5cfc5aa4a5b8cd2f33e99675356 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1497.901224] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c12f5cfc5aa4a5b8cd2f33e99675356 [ 1497.902260] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1497.902474] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1497.902645] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1497.902802] env[62740]: DEBUG nova.compute.resource_tracker [None 
req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62740) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1497.903906] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc4f88cb-3119-4d8b-8ec3-d5d13cddbc2c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.912688] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feb7e1a8-61d7-4114-92b3-fe7671d039f2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.926180] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b91157ed-a239-4d89-9f1b-0ed08c1ca459 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.932413] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f7abf86-6b0f-4c5d-86c4-28aa1ca13fe6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.960450] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181550MB free_disk=90GB free_vcpus=48 pci_devices=None {{(pid=62740) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1497.960611] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1497.960847] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1497.961639] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 2056232803e64666a20bd2cf3f8db169 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1497.994991] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2056232803e64666a20bd2cf3f8db169 [ 1497.999104] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 86dcccf559494daeb9be3b452edbcaf9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1498.012782] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 86dcccf559494daeb9be3b452edbcaf9 [ 1498.034653] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 6005c9dc-3067-4719-a8f9-befb63f7cd8d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
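Note: the compute_resources acquire/release pairs above (with waited/held timings) are oslo.concurrency's lockutils guarding the resource tracker. The decorator and context-manager forms below use the real lockutils API; the function bodies are placeholders.

    from oslo_concurrency import lockutils

    @lockutils.synchronized("compute_resources")
    def update_available_resource():
        # Runs with the in-process "compute_resources" lock held; lockutils
        # emits the acquired/released DEBUG lines, including wait/hold times.
        pass

    # Equivalent context-manager form:
    def clean_compute_node_cache():
        with lockutils.lock("compute_resources"):
            pass  # critical section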
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1498.034828] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance ba23ede2-be42-48ac-b281-571ccd158dee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1498.034959] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1498.035097] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 2deff09f-d24f-4609-91f2-1585e8407c2a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1498.035220] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 3f36f081-2851-4339-860d-0a302ef4ee2c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1498.035344] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 913ddb91-9d46-459e-8775-c9f380ed3cc4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1498.035460] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 07efd13e-40d0-4158-b17c-6f5c75474ce3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1498.035575] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 388d71f2-b229-4666-a53d-d5b07e498eed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1498.035691] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance c0daf074-eecb-4899-938f-477031efc6d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1498.035839] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 61fea037-aac3-47ef-aa6a-5dfa657d840d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1498.036381] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 1c57bff600aa4e66b446914d72cbcbcc in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1498.047160] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1c57bff600aa4e66b446914d72cbcbcc [ 1498.047900] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 86c57375-8328-4344-b228-2f1ce6efc71e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1498.048680] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 5a58dbeb939b418d9c6d387720f55a91 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1498.058647] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5a58dbeb939b418d9c6d387720f55a91 [ 1498.059265] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance a41506d2-33b2-40b8-badb-41312c7abbd2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1498.059755] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 93ae59e07f2a458abc9de63c418ee07c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1498.069684] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 93ae59e07f2a458abc9de63c418ee07c [ 1498.070344] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 3aa2858e-d422-408a-a83a-98382f971add has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
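Note: _remove_deleted_instances_allocations, logged above, walks the placement allocations for this node and keeps those belonging to instances that are either actively managed here or scheduled but not yet started; only the remainder would be removed. A compact sketch of that decision, using plain dicts and sets instead of Nova's objects:

    def stale_allocations(allocations, tracked, scheduled):
        # allocations: {instance_uuid: resources} reported by placement
        # tracked: uuids actively managed on this host (kept)
        # scheduled: uuids allocated by the scheduler but not started (heal skipped)
        return {
            uuid: res
            for uuid, res in allocations.items()
            if uuid not in tracked and uuid not in scheduled
        }

    # Example: only "gone" would be flagged for removal from placement.
    print(stale_allocations(
        {"a": {"VCPU": 1}, "b": {"VCPU": 1}, "gone": {"VCPU": 1}},
        tracked={"a"}, scheduled={"b"}))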
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1498.070567] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1498.070726] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1498.216302] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0167bb7-ccb7-4b53-a0e7-c0248ece7977 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.223650] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d808a3d-bec9-48b7-8455-2feff60ba0ec {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.253925] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02bfb2cf-0d16-4c54-967d-363159ccbb78 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.260545] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e740a094-5e5a-4336-90cf-2fd80935e8cf {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.272820] env[62740]: DEBUG nova.compute.provider_tree [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1498.273276] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg bbe6b3b0ac744e929e0663dceecd6943 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1498.281011] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bbe6b3b0ac744e929e0663dceecd6943 [ 1498.281899] env[62740]: DEBUG nova.scheduler.client.report [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1498.284121] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 084d4e2ca7364133a4d26f9bd4c20ef8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1498.296950] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 084d4e2ca7364133a4d26f9bd4c20ef8 [ 
1498.297587] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62740) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1498.297770] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.337s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1499.298554] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1500.891614] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1501.887074] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1501.887932] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 8bf8d389e2c14156bed6ac29ade53cc4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1501.906975] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8bf8d389e2c14156bed6ac29ade53cc4 [ 1509.590926] env[62740]: WARNING oslo_vmware.rw_handles [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1509.590926] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1509.590926] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1509.590926] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1509.590926] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1509.590926] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 1509.590926] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1509.590926] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1509.590926] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1509.590926] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1509.590926] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1509.590926] env[62740]: ERROR oslo_vmware.rw_handles [ 1509.591804] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 
tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/52783c17-5e48-42d0-a15b-45a4f0836326/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1509.593932] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1509.594200] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Copying Virtual Disk [datastore2] vmware_temp/52783c17-5e48-42d0-a15b-45a4f0836326/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore2] vmware_temp/52783c17-5e48-42d0-a15b-45a4f0836326/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1509.594520] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3767463b-07c2-4a00-b4c3-74666af4df9d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.602472] env[62740]: DEBUG oslo_vmware.api [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Waiting for the task: (returnval){ [ 1509.602472] env[62740]: value = "task-640293" [ 1509.602472] env[62740]: _type = "Task" [ 1509.602472] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.610209] env[62740]: DEBUG oslo_vmware.api [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Task: {'id': task-640293, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.113909] env[62740]: DEBUG oslo_vmware.exceptions [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Fault InvalidArgument not matched. 
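Note: "Fault InvalidArgument not matched" above is get_fault_class failing to find a specific exception for the vCenter fault name, so the error surfaces as the generic VimFaultException seen in the traceback that follows. A simplified registry-lookup sketch of that translation (the registry contents here are illustrative, not oslo.vmware's actual table):

    class VimFaultException(Exception):
        # Generic fallback carrying the raw fault names, as in the
        # "Faults: ['InvalidArgument']" lines below.
        def __init__(self, fault_list, msg):
            super().__init__(msg)
            self.fault_list = fault_list

    _FAULT_REGISTRY = {
        # fault name -> specific exception class; "InvalidArgument" is
        # deliberately absent here, hence the "not matched" DEBUG line.
        "FileAlreadyExists": FileExistsError,
    }

    def translate_fault(name, msg):
        cls = _FAULT_REGISTRY.get(name)
        if cls is None:
            return VimFaultException([name], msg)
        return cls(msg)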
{{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1510.114217] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1510.114789] env[62740]: ERROR nova.compute.manager [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1510.114789] env[62740]: Faults: ['InvalidArgument'] [ 1510.114789] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Traceback (most recent call last): [ 1510.114789] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1510.114789] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] yield resources [ 1510.114789] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1510.114789] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] self.driver.spawn(context, instance, image_meta, [ 1510.114789] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1510.114789] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1510.114789] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1510.114789] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] self._fetch_image_if_missing(context, vi) [ 1510.114789] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1510.115150] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] image_cache(vi, tmp_image_ds_loc) [ 1510.115150] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1510.115150] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] vm_util.copy_virtual_disk( [ 1510.115150] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1510.115150] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] session._wait_for_task(vmdk_copy_task) [ 1510.115150] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 
157, in _wait_for_task [ 1510.115150] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] return self.wait_for_task(task_ref) [ 1510.115150] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1510.115150] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] return evt.wait() [ 1510.115150] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1510.115150] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] result = hub.switch() [ 1510.115150] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1510.115150] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] return self.greenlet.switch() [ 1510.115473] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1510.115473] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] self.f(*self.args, **self.kw) [ 1510.115473] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1510.115473] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] raise exceptions.translate_fault(task_info.error) [ 1510.115473] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1510.115473] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Faults: ['InvalidArgument'] [ 1510.115473] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] [ 1510.115473] env[62740]: INFO nova.compute.manager [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Terminating instance [ 1510.116854] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1510.117011] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1510.117907] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fd8092b6-55e8-412a-ad16-79c93a6dce01 {{(pid=62740) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.119439] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Acquiring lock "refresh_cache-6005c9dc-3067-4719-a8f9-befb63f7cd8d" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1510.119648] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Acquired lock "refresh_cache-6005c9dc-3067-4719-a8f9-befb63f7cd8d" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1510.119853] env[62740]: DEBUG nova.network.neutron [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1510.120310] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg 223411b1e2de44f485d8a7276855ee05 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1510.127059] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1510.127244] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1510.128322] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 223411b1e2de44f485d8a7276855ee05 [ 1510.128710] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15545a0a-5cfb-4cb9-90e4-c1872c73220b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.137473] env[62740]: DEBUG oslo_vmware.api [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Waiting for the task: (returnval){ [ 1510.137473] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5237c160-63d8-248e-985c-8c76c9dd36fb" [ 1510.137473] env[62740]: _type = "Task" [ 1510.137473] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.144881] env[62740]: DEBUG oslo_vmware.api [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5237c160-63d8-248e-985c-8c76c9dd36fb, 'name': SearchDatastore_Task} progress is 0%. 
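Note: the MakeDirectory / "Created directory" / "Folder ... created" sequence above is an idempotent mkdir: creating a directory that already exists is tolerated, so concurrent image-cache writers can race safely. A local-filesystem sketch of the same idea, assuming nothing about vSphere:

    import os

    def ensure_dir(path):
        # Succeed whether or not the directory already exists, mirroring
        # ds_util.mkdir tolerating the datastore's already-exists fault.
        try:
            os.makedirs(path)
        except FileExistsError:
            pass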
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.214271] env[62740]: DEBUG nova.network.neutron [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1510.285833] env[62740]: DEBUG nova.network.neutron [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1510.286419] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg 5236c3a91ee745849679d07f806ad37a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1510.295031] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5236c3a91ee745849679d07f806ad37a [ 1510.295436] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Releasing lock "refresh_cache-6005c9dc-3067-4719-a8f9-befb63f7cd8d" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1510.295855] env[62740]: DEBUG nova.compute.manager [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Start destroying the instance on the hypervisor. 
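Note: the destroy path that follows runs in a fixed order: unregister the VM first so vSphere releases its files, then delete the instance directory from the datastore, and only report completion once the delete task finishes. A sketch of that ordering with hypothetical vm/datastore wrapper objects (illustration only; Nova's vmops works against the raw vSphere API):

    def destroy_instance(vm, datastore, instance_dir):
        # 1. UnregisterVM: remove the VM from the hypervisor inventory.
        vm.unregister()                              # hypothetical wrapper call
        # 2. DeleteDatastoreFile_Task: remove the VM's files.
        task = datastore.delete_files(instance_dir)  # hypothetical wrapper call
        # 3. Block until the delete task completes before declaring success.
        task.wait()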
{{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1510.296061] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1510.297148] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81ee534b-65a9-470b-ab16-133291a6aafa {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.305084] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1510.305314] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e14162a9-b062-4931-adde-e5324221ee3a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.335811] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1510.336039] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1510.336229] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Deleting the datastore file [datastore2] 6005c9dc-3067-4719-a8f9-befb63f7cd8d {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1510.336530] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5b1fc486-7bc2-4b18-bac4-07765c79c0a1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.343455] env[62740]: DEBUG oslo_vmware.api [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Waiting for the task: (returnval){ [ 1510.343455] env[62740]: value = "task-640295" [ 1510.343455] env[62740]: _type = "Task" [ 1510.343455] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.350680] env[62740]: DEBUG oslo_vmware.api [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Task: {'id': task-640295, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.647548] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1510.647838] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Creating directory with path [datastore2] vmware_temp/ac63b490-b5dc-4b1a-9434-b3e2cadc84f5/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1510.648047] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4171e1ff-8ab9-4fd0-be37-870a99cec179 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.658514] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Created directory with path [datastore2] vmware_temp/ac63b490-b5dc-4b1a-9434-b3e2cadc84f5/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1510.658710] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Fetch image to [datastore2] vmware_temp/ac63b490-b5dc-4b1a-9434-b3e2cadc84f5/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1510.658892] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/ac63b490-b5dc-4b1a-9434-b3e2cadc84f5/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1510.659607] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0717af5f-5c78-4b64-90ee-0c2aed26c81b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.665714] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af5ee370-00ff-4c99-83c7-642daf43c06b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.674339] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6ec5058-714f-46e3-adbe-fd1a4e68cabb {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.703570] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a173b1f3-44c8-43a3-bbad-30456090b499 {{(pid=62740) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.708963] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-74806945-7fb0-4493-ad43-a4f008944b24 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.728503] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1510.777189] env[62740]: DEBUG oslo_vmware.rw_handles [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ac63b490-b5dc-4b1a-9434-b3e2cadc84f5/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1510.837240] env[62740]: DEBUG oslo_vmware.rw_handles [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Completed reading data from the image iterator. {{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1510.837489] env[62740]: DEBUG oslo_vmware.rw_handles [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ac63b490-b5dc-4b1a-9434-b3e2cadc84f5/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1510.853955] env[62740]: DEBUG oslo_vmware.api [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Task: {'id': task-640295, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.050246} completed successfully. 
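Note: the rw_handles lines above show the image upload as a raw streaming HTTP PUT against the datastore folder URL; the write handle sends chunks and only reads the server's response on close, which is where the RemoteDisconnected warning earlier in this log was raised. A stdlib sketch of that write-handle pattern (upload_to_datastore and its arguments are illustrative, not oslo.vmware's interface):

    import http.client

    def upload_to_datastore(host, path, chunks, size):
        # Stream image data to the datastore URL, then read the response;
        # a server that closes early surfaces here as RemoteDisconnected.
        conn = http.client.HTTPSConnection(host, 443)
        conn.putrequest("PUT", path)
        conn.putheader("Content-Length", str(size))
        conn.endheaders()
        for chunk in chunks:
            conn.send(chunk)
        resp = conn.getresponse()  # raises RemoteDisconnected on early close
        body = resp.read()
        conn.close()
        return resp.status, body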
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.854215] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1510.854395] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1510.854561] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1510.854735] env[62740]: INFO nova.compute.manager [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Took 0.56 seconds to destroy the instance on the hypervisor. [ 1510.855060] env[62740]: DEBUG oslo.service.loopingcall [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1510.855231] env[62740]: DEBUG nova.compute.manager [-] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Skipping network deallocation for instance since networking was not requested. 
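Note: "Skipping network deallocation ... networking was not requested" above is an early-out in the teardown path: with no requested networks there is nothing to release, so the Neutron call (and its retry loop) is bypassed entirely. A sketch of that guard, with a simplified instance dict:

    def try_deallocate_network(instance, deallocate):
        # deallocate: callable performing the real Neutron cleanup.
        if not instance.get("requested_networks"):
            print("Skipping network deallocation for instance "
                  "since networking was not requested.")
            return
        deallocate(instance)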
{{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 1510.857510] env[62740]: DEBUG nova.compute.claims [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1510.857510] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1510.857708] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1510.859631] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg 18521839361e4e36bb837e2caf73f4d1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1510.892374] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 18521839361e4e36bb837e2caf73f4d1 [ 1511.041589] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ae8c3bb-f11f-4ae7-94cd-c3c746904ed8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.049023] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8ede80d-71df-4334-8280-975b61a3c830 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.077644] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74700686-2fad-4122-b672-1d1fc7c52f35 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.084628] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af5b711-a526-4230-b264-5318e834bd1a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.097280] env[62740]: DEBUG nova.compute.provider_tree [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1511.097774] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg 3eadfb91a20c4b2faa87b459984048d7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1511.104931] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC 
response for msg 3eadfb91a20c4b2faa87b459984048d7 [ 1511.105800] env[62740]: DEBUG nova.scheduler.client.report [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1511.107990] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg 96dc2c6b0e074a128138c7aaf9650884 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1511.118218] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 96dc2c6b0e074a128138c7aaf9650884 [ 1511.118906] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.261s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1511.119434] env[62740]: ERROR nova.compute.manager [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1511.119434] env[62740]: Faults: ['InvalidArgument'] [ 1511.119434] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Traceback (most recent call last): [ 1511.119434] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1511.119434] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] self.driver.spawn(context, instance, image_meta, [ 1511.119434] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1511.119434] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1511.119434] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1511.119434] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] self._fetch_image_if_missing(context, vi) [ 1511.119434] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1511.119434] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] image_cache(vi, 
tmp_image_ds_loc) [ 1511.119434] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1511.119807] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] vm_util.copy_virtual_disk( [ 1511.119807] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1511.119807] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] session._wait_for_task(vmdk_copy_task) [ 1511.119807] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1511.119807] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] return self.wait_for_task(task_ref) [ 1511.119807] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1511.119807] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] return evt.wait() [ 1511.119807] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1511.119807] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] result = hub.switch() [ 1511.119807] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1511.119807] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] return self.greenlet.switch() [ 1511.119807] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1511.119807] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] self.f(*self.args, **self.kw) [ 1511.120166] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1511.120166] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] raise exceptions.translate_fault(task_info.error) [ 1511.120166] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1511.120166] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Faults: ['InvalidArgument'] [ 1511.120166] env[62740]: ERROR nova.compute.manager [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] [ 1511.120166] env[62740]: DEBUG nova.compute.utils [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1511.121709] env[62740]: DEBUG nova.compute.manager [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 
tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Build of instance 6005c9dc-3067-4719-a8f9-befb63f7cd8d was re-scheduled: A specified parameter was not correct: fileType [ 1511.121709] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1511.122186] env[62740]: DEBUG nova.compute.manager [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1511.122415] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Acquiring lock "refresh_cache-6005c9dc-3067-4719-a8f9-befb63f7cd8d" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1511.122564] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Acquired lock "refresh_cache-6005c9dc-3067-4719-a8f9-befb63f7cd8d" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1511.122730] env[62740]: DEBUG nova.network.neutron [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1511.123118] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg 1a46361f711545be98738a38b1d6d3e6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1511.128988] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1a46361f711545be98738a38b1d6d3e6 [ 1511.147157] env[62740]: DEBUG nova.network.neutron [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Instance cache missing network info. 
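The failure path above follows the usual build-retry shape: the compute manager catches the driver fault, aborts the resource claim it took earlier, cleans up networking, and marks the build as re-scheduled so another attempt can be made. A minimal sketch of that control flow, with hypothetical helper names rather than Nova's real signatures:

class RescheduledException(Exception):
    """Signals that the build should be retried elsewhere (illustrative)."""

def build_and_run(instance, driver, claim, network_api):
    try:
        driver.spawn(instance)                 # raises on a hypervisor fault
    except Exception as exc:
        claim.abort()                          # return the claimed CPU/RAM/disk
        network_api.cleanup(instance)          # unplug VIFs, drop port bindings
        raise RescheduledException(str(exc)) from exc
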
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1511.247152] env[62740]: DEBUG nova.network.neutron [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1511.247732] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg d53f1d4a02fc43d6b921f8eeaeb42983 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1511.256063] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d53f1d4a02fc43d6b921f8eeaeb42983 [ 1511.256692] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Releasing lock "refresh_cache-6005c9dc-3067-4719-a8f9-befb63f7cd8d" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1511.256908] env[62740]: DEBUG nova.compute.manager [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1511.257165] env[62740]: DEBUG nova.compute.manager [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: 6005c9dc-3067-4719-a8f9-befb63f7cd8d] Skipping network deallocation for instance since networking was not requested. 
{{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 1511.258928] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg 12ddf66c96754db5aacde6a227f8445f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1511.293605] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 12ddf66c96754db5aacde6a227f8445f [ 1511.296310] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg 064c340eb3c44a0ab6b6617bcc94a6f8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1511.328725] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 064c340eb3c44a0ab6b6617bcc94a6f8 [ 1511.350615] env[62740]: INFO nova.scheduler.client.report [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Deleted allocations for instance 6005c9dc-3067-4719-a8f9-befb63f7cd8d [ 1511.356562] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg e75fae12c4bd482cbd3a4fe5c87a4add in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1511.369428] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e75fae12c4bd482cbd3a4fe5c87a4add [ 1511.370050] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3592d3f5-73e8-4592-bed9-26d90e2263e5 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Lock "6005c9dc-3067-4719-a8f9-befb63f7cd8d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 382.093s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1511.370874] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg c0e6f6f04afc42ac90f3971971f31534 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1511.384643] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c0e6f6f04afc42ac90f3971971f31534 [ 1511.384903] env[62740]: DEBUG nova.compute.manager [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Starting instance... 
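The claim sequence that follows (like the abort above) serializes on a single named lock, "compute_resources", so per-host accounting is updated atomically. Stripped of oslo.concurrency's decorator machinery, the pattern is an ordinary named-lock critical section; a stdlib-only sketch, with the tracker call left as an assumed placeholder:

import threading
from contextlib import contextmanager

_locks: dict[str, threading.Lock] = {}
_registry_lock = threading.Lock()

@contextmanager
def named_lock(name: str):
    # Lazily create one lock per name, then hold it for the duration of the block.
    with _registry_lock:
        lock = _locks.setdefault(name, threading.Lock())
    with lock:
        yield

def claim_resources(tracker, instance):
    with named_lock("compute_resources"):
        tracker.instance_claim(instance)    # placeholder for the real tracker call
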
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1511.386534] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg bb8adc195cff4d38a2958da900238131 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1511.421228] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bb8adc195cff4d38a2958da900238131 [ 1511.440437] env[62740]: DEBUG oslo_concurrency.lockutils [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1511.440628] env[62740]: DEBUG oslo_concurrency.lockutils [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1511.442260] env[62740]: INFO nova.compute.claims [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1511.444138] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 88523ad637284b51985edc71e3086f37 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1511.475798] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 88523ad637284b51985edc71e3086f37 [ 1511.477771] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg fd7e9efdb11547acbaa12f5c0ea31347 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1511.485964] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fd7e9efdb11547acbaa12f5c0ea31347 [ 1511.632930] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-957698f9-6bc4-4292-9fab-85648f6834b5 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.640060] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02282e59-ee48-4385-93ea-a33c83551e05 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.668908] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8d5de95-94e2-4642-a13c-341f69f31960 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.675593] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-710fc0fc-54dc-40eb-8de0-1bbbcdbc76bc {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.688027] env[62740]: DEBUG nova.compute.provider_tree [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1511.688539] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 2e690debc393407f814593be80472159 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1511.696200] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e690debc393407f814593be80472159 [ 1511.697054] env[62740]: DEBUG nova.scheduler.client.report [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1511.699296] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 1475a7cde3c3498f8c618034d435d9fe in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1511.711812] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1475a7cde3c3498f8c618034d435d9fe [ 1511.712468] env[62740]: DEBUG oslo_concurrency.lockutils [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.272s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1511.712917] env[62740]: DEBUG nova.compute.manager [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Start building networks asynchronously for instance. 
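The inventory record repeated above is what Placement uses to bound scheduling: usable capacity per resource class is (total - reserved) * allocation_ratio, with min_unit, max_unit, and step_size constraining individual allocations. For the logged data that works out to 192 schedulable VCPUs, 196078 MB of RAM, and 400 GB of disk. A quick check over the dict exactly as logged, trimmed to the fields the formula needs:

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def capacity(inv):
    # Effective capacity: unreserved total scaled by the overcommit ratio.
    return (inv['total'] - inv['reserved']) * inv['allocation_ratio']

for rc, inv in inventory.items():
    print(rc, capacity(inv))    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
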
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1511.714470] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 2c9278288326431e936d7cc829b39465 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1511.741638] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2c9278288326431e936d7cc829b39465 [ 1511.743208] env[62740]: DEBUG nova.compute.utils [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1511.743816] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 83b059382f094a2ebd5734f2fae1a5ce in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1511.744919] env[62740]: DEBUG nova.compute.manager [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1511.745883] env[62740]: DEBUG nova.network.neutron [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1511.753520] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 83b059382f094a2ebd5734f2fae1a5ce [ 1511.754235] env[62740]: DEBUG nova.compute.manager [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1511.755676] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 1bd83dab476a4dc4851eb25b23086f5f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1511.787686] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1bd83dab476a4dc4851eb25b23086f5f [ 1511.790823] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 11a9096df9244af986641c21557360b4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1511.808033] env[62740]: DEBUG nova.policy [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fa549a18cbf84678844e14ddd094d70e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '065d149aea7645d7a5e32c0d14ff0936', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 1511.819008] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 11a9096df9244af986641c21557360b4 [ 1511.820116] env[62740]: DEBUG nova.compute.manager [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Start spawning the instance on the hypervisor. 
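The policy line above is informational rather than fatal: network:attach_external_network is typically an admin-only rule, the request credentials carry only the reader and member roles, so the check fails and the build continues without external-network privileges. Reduced to its essentials, a role-based check of this shape looks as follows (the rule table and helper are illustrative, not Nova's policy engine):

POLICIES = {
    # rule name -> roles that satisfy it (illustrative rule table)
    'network:attach_external_network': {'admin'},
}

def authorize(rule, credentials):
    required = POLICIES.get(rule, set())
    return bool(required & set(credentials.get('roles', [])))

creds = {'roles': ['reader', 'member']}    # roles from the logged credential dump
print(authorize('network:attach_external_network', creds))    # False
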
{{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1511.844594] env[62740]: DEBUG nova.virt.hardware [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1511.844832] env[62740]: DEBUG nova.virt.hardware [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1511.844991] env[62740]: DEBUG nova.virt.hardware [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1511.845189] env[62740]: DEBUG nova.virt.hardware [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1511.845338] env[62740]: DEBUG nova.virt.hardware [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1511.845487] env[62740]: DEBUG nova.virt.hardware [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1511.845695] env[62740]: DEBUG nova.virt.hardware [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1511.845857] env[62740]: DEBUG nova.virt.hardware [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1511.846033] env[62740]: DEBUG nova.virt.hardware [None 
req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1511.846202] env[62740]: DEBUG nova.virt.hardware [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1511.846378] env[62740]: DEBUG nova.virt.hardware [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1511.847224] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1676aa5f-d72a-42e6-a7ea-bb982a098a37 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.854486] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01f7a3d8-532b-47c7-affc-7a6c8515cbd0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.179974] env[62740]: DEBUG nova.network.neutron [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Successfully created port: cf4b27e8-9cce-4793-8d64-6d3756aac8d2 {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1512.870665] env[62740]: DEBUG nova.compute.manager [req-d6edf885-c7dd-442e-82af-e6fe6880d21c req-2278c6e5-6b70-418e-add5-67b4a0b8538d service nova] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Received event network-vif-plugged-cf4b27e8-9cce-4793-8d64-6d3756aac8d2 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1512.870839] env[62740]: DEBUG oslo_concurrency.lockutils [req-d6edf885-c7dd-442e-82af-e6fe6880d21c req-2278c6e5-6b70-418e-add5-67b4a0b8538d service nova] Acquiring lock "86c57375-8328-4344-b228-2f1ce6efc71e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1512.870965] env[62740]: DEBUG oslo_concurrency.lockutils [req-d6edf885-c7dd-442e-82af-e6fe6880d21c req-2278c6e5-6b70-418e-add5-67b4a0b8538d service nova] Lock "86c57375-8328-4344-b228-2f1ce6efc71e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1512.871160] env[62740]: DEBUG oslo_concurrency.lockutils [req-d6edf885-c7dd-442e-82af-e6fe6880d21c req-2278c6e5-6b70-418e-add5-67b4a0b8538d service nova] Lock "86c57375-8328-4344-b228-2f1ce6efc71e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1512.871328] env[62740]: DEBUG nova.compute.manager 
[req-d6edf885-c7dd-442e-82af-e6fe6880d21c req-2278c6e5-6b70-418e-add5-67b4a0b8538d service nova] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] No waiting events found dispatching network-vif-plugged-cf4b27e8-9cce-4793-8d64-6d3756aac8d2 {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1512.871490] env[62740]: WARNING nova.compute.manager [req-d6edf885-c7dd-442e-82af-e6fe6880d21c req-2278c6e5-6b70-418e-add5-67b4a0b8538d service nova] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Received unexpected event network-vif-plugged-cf4b27e8-9cce-4793-8d64-6d3756aac8d2 for instance with vm_state building and task_state spawning. [ 1512.941288] env[62740]: DEBUG nova.network.neutron [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Successfully updated port: cf4b27e8-9cce-4793-8d64-6d3756aac8d2 {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1512.941782] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 1990895b90914949b4cb669574500c79 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1512.953241] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1990895b90914949b4cb669574500c79 [ 1512.954011] env[62740]: DEBUG oslo_concurrency.lockutils [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquiring lock "refresh_cache-86c57375-8328-4344-b228-2f1ce6efc71e" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1512.954211] env[62740]: DEBUG oslo_concurrency.lockutils [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquired lock "refresh_cache-86c57375-8328-4344-b228-2f1ce6efc71e" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1512.955086] env[62740]: DEBUG nova.network.neutron [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1512.955086] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg eb451d0a91a342caabddcbe1daf535d5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1512.963711] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eb451d0a91a342caabddcbe1daf535d5 [ 1512.998159] env[62740]: DEBUG nova.network.neutron [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Instance cache missing network info. 
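The network-vif-plugged event above arrives from Neutron before the compute manager has registered a waiter for it, which is why the log shows "No waiting events found" followed by the "unexpected event" warning: external events are matched against a per-instance table of pending waiters and merely logged when nothing is waiting. A compact sketch of that dispatch pattern (class and method names are illustrative):

import threading

class InstanceEvents:
    """Match externally delivered events to threads waiting on them."""

    def __init__(self):
        self._waiters = {}            # (instance_id, event) -> threading.Event
        self._lock = threading.Lock()

    def expect(self, instance_id, event):
        waiter = threading.Event()
        with self._lock:
            self._waiters[(instance_id, event)] = waiter
        return waiter                  # caller blocks on waiter.wait()

    def deliver(self, instance_id, event):
        with self._lock:
            waiter = self._waiters.pop((instance_id, event), None)
        if waiter is None:
            print('unexpected event %s for %s' % (event, instance_id))
        else:
            waiter.set()
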
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1513.244913] env[62740]: DEBUG nova.network.neutron [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Updating instance_info_cache with network_info: [{"id": "cf4b27e8-9cce-4793-8d64-6d3756aac8d2", "address": "fa:16:3e:00:be:f7", "network": {"id": "a1bf429f-63e1-4b06-ba31-36e8e686268d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1763096855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "065d149aea7645d7a5e32c0d14ff0936", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf4b27e8-9c", "ovs_interfaceid": "cf4b27e8-9cce-4793-8d64-6d3756aac8d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1513.245474] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg a6e1c300232a48d0bdb5173c5f393b48 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1513.254985] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a6e1c300232a48d0bdb5173c5f393b48 [ 1513.255610] env[62740]: DEBUG oslo_concurrency.lockutils [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Releasing lock "refresh_cache-86c57375-8328-4344-b228-2f1ce6efc71e" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1513.255892] env[62740]: DEBUG nova.compute.manager [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Instance network_info: |[{"id": "cf4b27e8-9cce-4793-8d64-6d3756aac8d2", "address": "fa:16:3e:00:be:f7", "network": {"id": "a1bf429f-63e1-4b06-ba31-36e8e686268d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1763096855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "065d149aea7645d7a5e32c0d14ff0936", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf4b27e8-9c", "ovs_interfaceid": "cf4b27e8-9cce-4793-8d64-6d3756aac8d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1513.256342] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:00:be:f7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b399c74-1411-408a-b4cd-84e268ae83fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cf4b27e8-9cce-4793-8d64-6d3756aac8d2', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1513.264103] env[62740]: DEBUG oslo.service.loopingcall [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1513.264580] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1513.264819] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1a63e0ab-2999-4748-a690-dd2cbd40eaed {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.285479] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1513.285479] env[62740]: value = "task-640296" [ 1513.285479] env[62740]: _type = "Task" [ 1513.285479] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1513.293694] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640296, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.795630] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640296, 'name': CreateVM_Task} progress is 99%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.297187] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640296, 'name': CreateVM_Task, 'duration_secs': 0.660447} completed successfully. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.297448] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1514.298091] env[62740]: DEBUG oslo_concurrency.lockutils [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1514.298271] env[62740]: DEBUG oslo_concurrency.lockutils [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1514.298626] env[62740]: DEBUG oslo_concurrency.lockutils [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1514.298885] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-902f43de-9107-4dee-8262-ca7c805ae579 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.303160] env[62740]: DEBUG oslo_vmware.api [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Waiting for the task: (returnval){ [ 1514.303160] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]524ee85a-7fed-b81d-2727-f0c7aba5cb06" [ 1514.303160] env[62740]: _type = "Task" [ 1514.303160] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.310696] env[62740]: DEBUG oslo_vmware.api [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]524ee85a-7fed-b81d-2727-f0c7aba5cb06, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.435734] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2063893a-6abf-47aa-b760-ab1e2c1130eb tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 30d2aaf60db84e9e8bee0f7f08aa7978 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1514.445905] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30d2aaf60db84e9e8bee0f7f08aa7978 [ 1514.446434] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2063893a-6abf-47aa-b760-ab1e2c1130eb tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquiring lock "86c57375-8328-4344-b228-2f1ce6efc71e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1514.815265] env[62740]: DEBUG oslo_concurrency.lockutils [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1514.815530] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1514.815747] env[62740]: DEBUG oslo_concurrency.lockutils [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1514.925921] env[62740]: DEBUG nova.compute.manager [req-a87eb50f-93c6-4374-833d-bb05e2fa91eb req-a8dea51a-97e4-481d-b0c1-c5a11653abd0 service nova] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Received event network-changed-cf4b27e8-9cce-4793-8d64-6d3756aac8d2 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1514.926175] env[62740]: DEBUG nova.compute.manager [req-a87eb50f-93c6-4374-833d-bb05e2fa91eb req-a8dea51a-97e4-481d-b0c1-c5a11653abd0 service nova] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Refreshing instance network info cache due to event network-changed-cf4b27e8-9cce-4793-8d64-6d3756aac8d2. 
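The lock names above ("[datastore2] devstack-image-cache_base/<image-id>" and its ".vmdk" variant) guard the per-datastore image cache so that concurrent builds of the same image on one host download it only once; later builds take the lock, find the cached disk, and skip the fetch. A fetch-once sketch of that pattern (stdlib only; the cache layout and fetch callback are assumptions):

import os
import threading
from contextlib import contextmanager

_locks, _registry = {}, threading.Lock()

@contextmanager
def named_lock(name):
    with _registry:
        lock = _locks.setdefault(name, threading.Lock())
    with lock:
        yield

def ensure_cached(cache_dir, image_id, fetch):
    """Download image_id into cache_dir at most once across threads."""
    path = os.path.join(cache_dir, image_id, image_id + '.vmdk')
    with named_lock(path):                 # same idea as the datastore path lock
        if not os.path.exists(path):
            os.makedirs(os.path.dirname(path), exist_ok=True)
            fetch(path)                    # caller-supplied download function
    return path
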
{{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1514.926439] env[62740]: DEBUG oslo_concurrency.lockutils [req-a87eb50f-93c6-4374-833d-bb05e2fa91eb req-a8dea51a-97e4-481d-b0c1-c5a11653abd0 service nova] Acquiring lock "refresh_cache-86c57375-8328-4344-b228-2f1ce6efc71e" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1514.926642] env[62740]: DEBUG oslo_concurrency.lockutils [req-a87eb50f-93c6-4374-833d-bb05e2fa91eb req-a8dea51a-97e4-481d-b0c1-c5a11653abd0 service nova] Acquired lock "refresh_cache-86c57375-8328-4344-b228-2f1ce6efc71e" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1514.926842] env[62740]: DEBUG nova.network.neutron [req-a87eb50f-93c6-4374-833d-bb05e2fa91eb req-a8dea51a-97e4-481d-b0c1-c5a11653abd0 service nova] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Refreshing network info cache for port cf4b27e8-9cce-4793-8d64-6d3756aac8d2 {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1514.927408] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-a87eb50f-93c6-4374-833d-bb05e2fa91eb req-a8dea51a-97e4-481d-b0c1-c5a11653abd0 service nova] Expecting reply to msg 589b701ca9614859925d1ef4e9e01077 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1514.934895] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 589b701ca9614859925d1ef4e9e01077 [ 1515.245115] env[62740]: DEBUG nova.network.neutron [req-a87eb50f-93c6-4374-833d-bb05e2fa91eb req-a8dea51a-97e4-481d-b0c1-c5a11653abd0 service nova] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Updated VIF entry in instance network info cache for port cf4b27e8-9cce-4793-8d64-6d3756aac8d2. 
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1515.245505] env[62740]: DEBUG nova.network.neutron [req-a87eb50f-93c6-4374-833d-bb05e2fa91eb req-a8dea51a-97e4-481d-b0c1-c5a11653abd0 service nova] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Updating instance_info_cache with network_info: [{"id": "cf4b27e8-9cce-4793-8d64-6d3756aac8d2", "address": "fa:16:3e:00:be:f7", "network": {"id": "a1bf429f-63e1-4b06-ba31-36e8e686268d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1763096855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "065d149aea7645d7a5e32c0d14ff0936", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf4b27e8-9c", "ovs_interfaceid": "cf4b27e8-9cce-4793-8d64-6d3756aac8d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1515.246043] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-a87eb50f-93c6-4374-833d-bb05e2fa91eb req-a8dea51a-97e4-481d-b0c1-c5a11653abd0 service nova] Expecting reply to msg 01e290a089df4885bac6fd34a1733da9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1515.254513] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 01e290a089df4885bac6fd34a1733da9 [ 1515.255252] env[62740]: DEBUG oslo_concurrency.lockutils [req-a87eb50f-93c6-4374-833d-bb05e2fa91eb req-a8dea51a-97e4-481d-b0c1-c5a11653abd0 service nova] Releasing lock "refresh_cache-86c57375-8328-4344-b228-2f1ce6efc71e" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1555.890636] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1555.890952] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Starting heal instance info cache {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 1555.891072] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Rebuilding the list of instances to heal {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 1555.891639] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg cbe98a006a1f42a6b593e0d266b4d4bb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1555.913276] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cbe98a006a1f42a6b593e0d266b4d4bb [ 1555.915551] env[62740]: DEBUG nova.compute.manager [None 
req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1555.915703] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1555.915839] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1555.915968] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1555.916107] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1555.916231] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1555.916350] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1555.916493] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1555.916642] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1555.916777] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1555.916895] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Didn't find any instances for network info cache update. 
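The run of "Skipping network cache update" lines is the _heal_instance_info_cache periodic task walking every instance on the host and passing over the ones still in the Building state; with no eligible instance left, it logs that there is nothing to heal and yields to the next periodic task. The skeleton of such a sweep (interval handling and the model objects are illustrative):

from dataclasses import dataclass

@dataclass
class Instance:
    uuid: str
    vm_state: str            # e.g. 'building', 'active'

def heal_info_cache(instances, refresh):
    candidates = [i for i in instances if i.vm_state != 'building']
    for inst in candidates:
        refresh(inst)        # rebuild that instance's network info cache
    if not candidates:
        print("Didn't find any instances for network info cache update.")
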
{{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 1555.917392] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1555.917604] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1556.913116] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1557.891500] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1557.891757] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1557.891907] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62740) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 1559.608830] env[62740]: WARNING oslo_vmware.rw_handles [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1559.608830] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1559.608830] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1559.608830] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1559.608830] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1559.608830] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 1559.608830] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1559.608830] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1559.608830] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1559.608830] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1559.608830] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1559.608830] env[62740]: ERROR oslo_vmware.rw_handles [ 1559.609465] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: 
ba23ede2-be42-48ac-b281-571ccd158dee] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/ac63b490-b5dc-4b1a-9434-b3e2cadc84f5/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1559.611339] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1559.611601] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Copying Virtual Disk [datastore2] vmware_temp/ac63b490-b5dc-4b1a-9434-b3e2cadc84f5/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore2] vmware_temp/ac63b490-b5dc-4b1a-9434-b3e2cadc84f5/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1559.611888] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6d8b07a3-2272-4879-8b19-e4fd17f3ae75 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.619821] env[62740]: DEBUG oslo_vmware.api [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Waiting for the task: (returnval){ [ 1559.619821] env[62740]: value = "task-640297" [ 1559.619821] env[62740]: _type = "Task" [ 1559.619821] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.627969] env[62740]: DEBUG oslo_vmware.api [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Task: {'id': task-640297, 'name': CopyVirtualDisk_Task} progress is 0%. 
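The RemoteDisconnected warning above is raised while closing the HTTP handle after streaming the image, and is evidently non-fatal here: the next entry confirms the sparse VMDK landed in vmware_temp, after which the cache fill continues by copying tmp-sparse.vmdk onto the final <image-id>.vmdk with a CopyVirtualDisk_Task. The two-phase flow in outline (download and copy_disk stand in for the HTTP stream and the server-side copy task; both are assumptions of the sketch):

def fill_image_cache(download, copy_disk, temp_dir, image_id):
    """Two-phase cache fill: stream a sparse VMDK, then convert it by copy."""
    tmp = '%s/%s/tmp-sparse.vmdk' % (temp_dir, image_id)
    final = '%s/%s/%s.vmdk' % (temp_dir, image_id, image_id)
    download(tmp)                    # may log a benign error on socket close
    copy_disk(src=tmp, dst=final)    # server-side task; poll as sketched earlier
    return final
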
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.890963] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager.update_available_resource {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1559.891388] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg a4552b42e1d944c580faca85b78e15ea in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1559.903216] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a4552b42e1d944c580faca85b78e15ea [ 1559.904239] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1559.904459] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1559.904630] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1559.904788] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62740) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1559.905859] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34f640bf-a31f-44a3-9566-b4751b0ec871 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.915056] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9010dc63-d5b7-4f4a-be7a-9bf8ab868e98 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.929225] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5225915d-065b-4bb7-b0b0-5dc594b66d69 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.935770] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e07f1ecd-fc2f-4df7-9c27-297b56a0a02a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.965454] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181680MB free_disk=90GB free_vcpus=48 pci_devices=None {{(pid=62740) _report_hypervisor_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1559.965591] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1559.965778] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1559.966612] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 329b3f2e011b4c5986ebf26eb69ab46f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1560.001058] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 329b3f2e011b4c5986ebf26eb69ab46f [ 1560.004934] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg cbf97f72039848bf91ebdcfd24efb8aa in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1560.014380] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cbf97f72039848bf91ebdcfd24efb8aa [ 1560.044702] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance ba23ede2-be42-48ac-b281-571ccd158dee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1560.044870] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1560.045007] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 2deff09f-d24f-4609-91f2-1585e8407c2a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1560.045191] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 3f36f081-2851-4339-860d-0a302ef4ee2c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1560.045359] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 913ddb91-9d46-459e-8775-c9f380ed3cc4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
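The _remove_deleted_instances_allocations records around this point walk every placement allocation held against this node and keep only those belonging to instances the host still manages or has accepted; the bookkeeping can be pictured as below (hypothetical helper, not Nova's code):

    def audit_allocations(allocations, tracked, scheduled):
        """Pick which placement allocations on this node to remove.

        allocations: dict of instance uuid -> resource dict
        tracked:     uuids actively managed on this host
        scheduled:   uuids claimed here but not yet started
        """
        to_remove = []
        for uuid in allocations:
            if uuid in tracked:
                continue  # "actively managed on this compute host": keep
            if uuid in scheduled:
                continue  # "has yet to start. Skipping heal of allocation."
            to_remove.append(uuid)
        return to_remove
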
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1560.045506] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 07efd13e-40d0-4158-b17c-6f5c75474ce3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1560.045643] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 388d71f2-b229-4666-a53d-d5b07e498eed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1560.045797] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance c0daf074-eecb-4899-938f-477031efc6d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1560.045926] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 61fea037-aac3-47ef-aa6a-5dfa657d840d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1560.046057] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 86c57375-8328-4344-b228-2f1ce6efc71e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1560.046587] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg c412498143f7422fbb25ac7c1dde75eb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1560.056169] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c412498143f7422fbb25ac7c1dde75eb [ 1560.056903] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance a41506d2-33b2-40b8-badb-41312c7abbd2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1560.057355] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 3c8ebb4af02342e08f2d27bc06acae36 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1560.066816] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c8ebb4af02342e08f2d27bc06acae36 [ 1560.067130] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 3aa2858e-d422-408a-a83a-98382f971add has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1560.067346] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1560.067493] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1560.129774] env[62740]: DEBUG oslo_vmware.exceptions [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Fault InvalidArgument not matched. 
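"Fault InvalidArgument not matched" above is get_fault_class reporting that the fault name attached to the vCenter error has no dedicated exception class, so the error surfaces as the generic VimFaultException seen in the traceback that follows. The lookup can be pictured like this (illustrative registry and classes; the real mapping lives inside oslo.vmware):

    class VimFaultException(Exception):
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    class FileNotFoundFault(VimFaultException):
        pass

    # Hypothetical table mapping fault names to exception classes.
    _FAULT_CLASSES = {"FileNotFound": FileNotFoundFault}

    def translate_fault(fault_name, message):
        cls = _FAULT_CLASSES.get(fault_name)
        if cls is None:
            # "Fault InvalidArgument not matched" corresponds to this branch.
            cls = VimFaultException
        return cls([fault_name], message)
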
{{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1560.132109] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1560.132663] env[62740]: ERROR nova.compute.manager [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1560.132663] env[62740]: Faults: ['InvalidArgument'] [ 1560.132663] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Traceback (most recent call last): [ 1560.132663] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1560.132663] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] yield resources [ 1560.132663] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1560.132663] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] self.driver.spawn(context, instance, image_meta, [ 1560.132663] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1560.132663] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1560.132663] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1560.132663] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] self._fetch_image_if_missing(context, vi) [ 1560.132663] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1560.132991] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] image_cache(vi, tmp_image_ds_loc) [ 1560.132991] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1560.132991] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] vm_util.copy_virtual_disk( [ 1560.132991] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1560.132991] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] session._wait_for_task(vmdk_copy_task) [ 1560.132991] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 
157, in _wait_for_task [ 1560.132991] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] return self.wait_for_task(task_ref) [ 1560.132991] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1560.132991] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] return evt.wait() [ 1560.132991] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1560.132991] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] result = hub.switch() [ 1560.132991] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1560.132991] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] return self.greenlet.switch() [ 1560.133480] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1560.133480] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] self.f(*self.args, **self.kw) [ 1560.133480] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1560.133480] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] raise exceptions.translate_fault(task_info.error) [ 1560.133480] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1560.133480] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Faults: ['InvalidArgument'] [ 1560.133480] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] [ 1560.133480] env[62740]: INFO nova.compute.manager [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Terminating instance [ 1560.134546] env[62740]: DEBUG oslo_concurrency.lockutils [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1560.134762] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1560.135258] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-782c0296-5612-4b6d-929a-b84f457a060f {{(pid=62740) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.137283] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Acquiring lock "refresh_cache-ba23ede2-be42-48ac-b281-571ccd158dee" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1560.137444] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Acquired lock "refresh_cache-ba23ede2-be42-48ac-b281-571ccd158dee" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1560.137612] env[62740]: DEBUG nova.network.neutron [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1560.138032] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg d7bd92fbd1b7487981fee921106fee3e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1560.146103] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1560.146103] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1560.146802] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d7bd92fbd1b7487981fee921106fee3e [ 1560.147150] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-adadfc71-9242-41af-b51c-c578a54c59e5 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.156320] env[62740]: DEBUG oslo_vmware.api [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Waiting for the task: (returnval){ [ 1560.156320] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52563b74-dbd7-a705-8140-05c400206a06" [ 1560.156320] env[62740]: _type = "Task" [ 1560.156320] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.163927] env[62740]: DEBUG oslo_vmware.api [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52563b74-dbd7-a705-8140-05c400206a06, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.172222] env[62740]: DEBUG nova.network.neutron [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1560.218269] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68b1754c-a9ce-4f14-9317-59e041f2b0a3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.225280] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80e2a916-0daa-421a-8556-46d3eefb8a72 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.255420] env[62740]: DEBUG nova.network.neutron [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1560.255904] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg e6daed4af98d4dfeb1d82ac5315ff2e2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1560.257185] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fae08ac2-5655-455b-8e5b-205602bbbcd3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.264968] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4170f7e-bb88-4ac7-9e87-d432f909eeaf {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.269344] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e6daed4af98d4dfeb1d82ac5315ff2e2 [ 1560.269916] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Releasing lock "refresh_cache-ba23ede2-be42-48ac-b281-571ccd158dee" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1560.270312] env[62740]: DEBUG nova.compute.manager [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Start destroying the instance on the hypervisor. 
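From here the driver tears the instance down in the sequence the following records spell out: unregister the VM, delete its directory on the datastore (another polled task), then report the instance destroyed. Compactly, against a hypothetical session API (invoke and wait_for_task are illustrative stand-ins, not real oslo.vmware calls):

    def destroy_instance(session, vm_ref, ds_path):
        """Sketch of the unregister/delete/destroy sequence below."""
        # "Unregistering the VM" - synchronous UnregisterVM call.
        session.invoke("UnregisterVM", vm_ref)
        # "Deleting the datastore file" - task-based, polled like any
        # other vCenter task (task-640299 in the records below).
        task = session.invoke("DeleteDatastoreFile_Task", ds_path)
        session.wait_for_task(task)
        # "Instance destroyed" is only logged once both steps succeed.
        print("Instance destroyed")
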
{{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1560.270503] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1560.271754] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-167bab51-8d62-4608-afad-e0624a5cac65 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.281569] env[62740]: DEBUG nova.compute.provider_tree [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1560.281997] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 4349a917a7154a1d8ef2f955997db62a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1560.287022] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1560.287254] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7eb136c6-cc05-4d5c-a828-fa907ba46bc6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.289447] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4349a917a7154a1d8ef2f955997db62a [ 1560.290281] env[62740]: DEBUG nova.scheduler.client.report [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1560.292450] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg f21e211e9984412ca1ba274e52fb548f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1560.302719] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f21e211e9984412ca1ba274e52fb548f [ 1560.303361] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62740) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1560.303545] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.338s 
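Each oslo.concurrency record above comes in a triple: "Acquiring" when the attempt starts, "acquired" with how long the caller waited, and "released" with how long the lock was held (0.338s for this resource-tracker pass). A small context manager reproduces that accounting (sketch, standard library only):

    import threading
    import time
    from contextlib import contextmanager

    @contextmanager
    def timed_lock(lock: threading.Lock, name: str):
        """Log waited/held durations around a lock, lockutils-style."""
        start = time.monotonic()
        lock.acquire()
        acquired = time.monotonic()
        print(f'Lock "{name}" acquired :: waited {acquired - start:.3f}s')
        try:
            yield
        finally:
            held = time.monotonic() - acquired
            lock.release()
            print(f'Lock "{name}" released :: held {held:.3f}s')

Used as "with timed_lock(lk, 'compute_resources'): ...", this produces the same waited/held pair the resource tracker logs.
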
{{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1560.318194] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1560.318365] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1560.318606] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Deleting the datastore file [datastore2] ba23ede2-be42-48ac-b281-571ccd158dee {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1560.318845] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-859a5bf5-3432-4ede-9806-f4e63f8e7349 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.325499] env[62740]: DEBUG oslo_vmware.api [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Waiting for the task: (returnval){ [ 1560.325499] env[62740]: value = "task-640299" [ 1560.325499] env[62740]: _type = "Task" [ 1560.325499] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.332898] env[62740]: DEBUG oslo_vmware.api [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Task: {'id': task-640299, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.667055] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1560.667055] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Creating directory with path [datastore2] vmware_temp/a117e2d6-03e3-4922-9419-386c171201df/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1560.667055] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-527f02bd-96ad-4796-adf6-059a32db6908 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.677863] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Created directory with path [datastore2] vmware_temp/a117e2d6-03e3-4922-9419-386c171201df/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1560.678066] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Fetch image to [datastore2] vmware_temp/a117e2d6-03e3-4922-9419-386c171201df/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1560.678242] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/a117e2d6-03e3-4922-9419-386c171201df/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1560.678971] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f71e9c7f-1ab8-47fa-9290-bbfd3d0633ff {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.685342] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0fc10a9-e742-4ec9-ac0f-c4e87e3dc903 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.693932] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aee86183-0af5-405d-884c-93cc1bfe89d8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.724149] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df5b29c9-3637-4a4e-a0cc-1a3843961787 
{{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.729390] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1b3553b1-05d4-477a-b968-3ef575478490 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.748717] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1560.796059] env[62740]: DEBUG oslo_vmware.rw_handles [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a117e2d6-03e3-4922-9419-386c171201df/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1560.858923] env[62740]: DEBUG oslo_vmware.rw_handles [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Completed reading data from the image iterator. {{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1560.858923] env[62740]: DEBUG oslo_vmware.rw_handles [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a117e2d6-03e3-4922-9419-386c171201df/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1560.862726] env[62740]: DEBUG oslo_vmware.api [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Task: {'id': task-640299, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.033016} completed successfully. 
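The rw_handles records above trace the image transfer end to end: a write handle opens an HTTP connection to the datastore URL with the known content length (21318656 bytes), streams the image iterator into it, and closes the handle; the RemoteDisconnected warning earlier in this log came from that close path, when the server dropped the connection before sending a response. A bare-bones version with http.client (illustrative; the real handle also carries auth cookies, a service ticket and a TLS context):

    import http.client

    def upload_file(host, path, data_iter, size):
        """Stream an iterable of byte chunks to an HTTPS URL via PUT."""
        conn = http.client.HTTPSConnection(host, 443)
        conn.putrequest("PUT", path)
        conn.putheader("Content-Length", str(size))
        conn.endheaders()
        for chunk in data_iter:
            conn.send(chunk)  # "Completed reading data from the image iterator."
        response = conn.getresponse()  # the step that raised RemoteDisconnected
        conn.close()
        return response.status
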
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.862990] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1560.863194] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1560.863385] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1560.863577] env[62740]: INFO nova.compute.manager [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Took 0.59 seconds to destroy the instance on the hypervisor. [ 1560.863820] env[62740]: DEBUG oslo.service.loopingcall [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1560.864047] env[62740]: DEBUG nova.compute.manager [-] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Skipping network deallocation for instance since networking was not requested. 
{{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 1560.866375] env[62740]: DEBUG nova.compute.claims [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1560.866559] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1560.866788] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1560.868695] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg f3f9869a53ec44018b90d16d9b1b62a3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1560.902359] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f3f9869a53ec44018b90d16d9b1b62a3 [ 1561.034309] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9f81c08-29db-4dcf-809d-eb3572d4eaa0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.041930] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-510c9ef5-77b4-40e9-a584-b5245c5b4624 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.070274] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b731fa22-3ff8-4deb-8d1d-5637645d5750 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.076985] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5edbd766-49c9-47a2-8245-37df23a5835f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.089180] env[62740]: DEBUG nova.compute.provider_tree [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1561.089652] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg feb74d95677e42bda76583af2e763b7e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1561.096765] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC 
response for msg feb74d95677e42bda76583af2e763b7e [ 1561.097582] env[62740]: DEBUG nova.scheduler.client.report [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1561.099771] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg 882533ebe3e34bf7837f65966f033315 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1561.110501] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 882533ebe3e34bf7837f65966f033315 [ 1561.111183] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.244s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1561.111686] env[62740]: ERROR nova.compute.manager [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1561.111686] env[62740]: Faults: ['InvalidArgument'] [ 1561.111686] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Traceback (most recent call last): [ 1561.111686] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1561.111686] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] self.driver.spawn(context, instance, image_meta, [ 1561.111686] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1561.111686] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1561.111686] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1561.111686] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] self._fetch_image_if_missing(context, vi) [ 1561.111686] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1561.111686] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] image_cache(vi, 
tmp_image_ds_loc) [ 1561.111686] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1561.112067] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] vm_util.copy_virtual_disk( [ 1561.112067] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1561.112067] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] session._wait_for_task(vmdk_copy_task) [ 1561.112067] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1561.112067] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] return self.wait_for_task(task_ref) [ 1561.112067] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1561.112067] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] return evt.wait() [ 1561.112067] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1561.112067] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] result = hub.switch() [ 1561.112067] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1561.112067] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] return self.greenlet.switch() [ 1561.112067] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1561.112067] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] self.f(*self.args, **self.kw) [ 1561.112430] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1561.112430] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] raise exceptions.translate_fault(task_info.error) [ 1561.112430] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1561.112430] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Faults: ['InvalidArgument'] [ 1561.112430] env[62740]: ERROR nova.compute.manager [instance: ba23ede2-be42-48ac-b281-571ccd158dee] [ 1561.112430] env[62740]: DEBUG nova.compute.utils [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1561.113755] env[62740]: DEBUG nova.compute.manager [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 
tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Build of instance ba23ede2-be42-48ac-b281-571ccd158dee was re-scheduled: A specified parameter was not correct: fileType [ 1561.113755] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1561.114165] env[62740]: DEBUG nova.compute.manager [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1561.114389] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Acquiring lock "refresh_cache-ba23ede2-be42-48ac-b281-571ccd158dee" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1561.114539] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Acquired lock "refresh_cache-ba23ede2-be42-48ac-b281-571ccd158dee" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1561.114705] env[62740]: DEBUG nova.network.neutron [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1561.115088] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg beb214dd97654ca09c9a4d70d6831a53 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1561.120576] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg beb214dd97654ca09c9a4d70d6831a53 [ 1561.138185] env[62740]: DEBUG nova.network.neutron [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1561.206393] env[62740]: DEBUG nova.network.neutron [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1561.206956] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg dc67d0febea04f4d9854cc481fec3039 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1561.216231] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dc67d0febea04f4d9854cc481fec3039 [ 1561.216985] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Releasing lock "refresh_cache-ba23ede2-be42-48ac-b281-571ccd158dee" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1561.217262] env[62740]: DEBUG nova.compute.manager [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1561.217485] env[62740]: DEBUG nova.compute.manager [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Skipping network deallocation for instance since networking was not requested. 
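The cleanup path above makes two separate decisions: whether VIFs can be unplugged (only if the virt driver implements unplug_vifs) and whether networks need deallocating at all (not when none were requested). In outline, with hypothetical driver and instance objects:

    def cleanup_allocated_networks(driver, instance, requested_networks):
        """Sketch of the unplug/deallocate decisions logged above."""
        unplug = getattr(driver, "unplug_vifs", None)
        if unplug is None:
            # "Virt driver does not provide unplug_vifs method..."
            print("cannot determine if VIFs should be unplugged")
        else:
            unplug(instance)
        if not requested_networks:
            # "Skipping network deallocation ... was not requested."
            return
        deallocate_for_instance(instance)

    def deallocate_for_instance(instance):
        print(f"deallocating networks for {instance}")
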
{{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 1561.219520] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg 1a0368384d9c42c085c3ac735c245116 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1561.253744] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1a0368384d9c42c085c3ac735c245116 [ 1561.256702] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg 12941cbb86ad44de9026924454777a9f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1561.288021] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 12941cbb86ad44de9026924454777a9f [ 1561.303640] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1561.310529] env[62740]: INFO nova.scheduler.client.report [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Deleted allocations for instance ba23ede2-be42-48ac-b281-571ccd158dee [ 1561.316611] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg c38b3ef0458044d0ab7a9dac1a9d5480 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1561.333041] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c38b3ef0458044d0ab7a9dac1a9d5480 [ 1561.333623] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7eca7f66-8313-4799-ae0a-cc46926fbbc3 tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Lock "ba23ede2-be42-48ac-b281-571ccd158dee" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 431.561s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1561.334201] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg b91bb0fb396c44c4bd85c61aa0891463 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1561.334934] env[62740]: DEBUG oslo_concurrency.lockutils [None req-17e10e78-752c-4810-aaf7-a363cb7ae39a tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Lock "ba23ede2-be42-48ac-b281-571ccd158dee" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 235.756s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1561.335189] env[62740]: DEBUG oslo_concurrency.lockutils [None req-17e10e78-752c-4810-aaf7-a363cb7ae39a tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Acquiring lock "ba23ede2-be42-48ac-b281-571ccd158dee-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1561.335393] env[62740]: DEBUG oslo_concurrency.lockutils [None req-17e10e78-752c-4810-aaf7-a363cb7ae39a tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Lock "ba23ede2-be42-48ac-b281-571ccd158dee-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1561.335559] env[62740]: DEBUG oslo_concurrency.lockutils [None req-17e10e78-752c-4810-aaf7-a363cb7ae39a tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Lock "ba23ede2-be42-48ac-b281-571ccd158dee-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1561.337711] env[62740]: INFO nova.compute.manager [None req-17e10e78-752c-4810-aaf7-a363cb7ae39a tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Terminating instance [ 1561.339355] env[62740]: DEBUG oslo_concurrency.lockutils [None req-17e10e78-752c-4810-aaf7-a363cb7ae39a tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Acquiring lock "refresh_cache-ba23ede2-be42-48ac-b281-571ccd158dee" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1561.339532] env[62740]: DEBUG oslo_concurrency.lockutils [None req-17e10e78-752c-4810-aaf7-a363cb7ae39a tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Acquired lock "refresh_cache-ba23ede2-be42-48ac-b281-571ccd158dee" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1561.339701] env[62740]: DEBUG nova.network.neutron [None req-17e10e78-752c-4810-aaf7-a363cb7ae39a tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1561.340124] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-17e10e78-752c-4810-aaf7-a363cb7ae39a tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg 36009373255d4fbc8eca3d84cb6d2a62 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1561.346257] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 36009373255d4fbc8eca3d84cb6d2a62 [ 1561.355565] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b91bb0fb396c44c4bd85c61aa0891463 [ 1561.355969] env[62740]: DEBUG nova.compute.manager [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Starting instance... 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1561.357645] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg 2daeebc5b2164371a9a57986d51adb5a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1561.365276] env[62740]: DEBUG nova.network.neutron [None req-17e10e78-752c-4810-aaf7-a363cb7ae39a tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1561.390267] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2daeebc5b2164371a9a57986d51adb5a [ 1561.405889] env[62740]: DEBUG oslo_concurrency.lockutils [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1561.406186] env[62740]: DEBUG oslo_concurrency.lockutils [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1561.407877] env[62740]: INFO nova.compute.claims [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1561.409523] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg 30959de3d7fa48b591e2433299d2e631 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1561.429179] env[62740]: DEBUG nova.network.neutron [None req-17e10e78-752c-4810-aaf7-a363cb7ae39a tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1561.429768] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-17e10e78-752c-4810-aaf7-a363cb7ae39a tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg f90b68a8f5d54f699eb628d5cefea02a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1561.438685] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f90b68a8f5d54f699eb628d5cefea02a [ 1561.439430] env[62740]: DEBUG oslo_concurrency.lockutils [None req-17e10e78-752c-4810-aaf7-a363cb7ae39a tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Releasing lock "refresh_cache-ba23ede2-be42-48ac-b281-571ccd158dee" {{(pid=62740) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1561.440096] env[62740]: DEBUG nova.compute.manager [None req-17e10e78-752c-4810-aaf7-a363cb7ae39a tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1561.440343] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-17e10e78-752c-4810-aaf7-a363cb7ae39a tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1561.441519] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-df7338e8-442e-4104-a8c7-6fd202e4d7dd {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.444585] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30959de3d7fa48b591e2433299d2e631 [ 1561.446656] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg 53f1f228f4f8401da361d30794ed0309 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1561.454480] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e238e3ba-2bdd-4e8b-82a3-d2f9fc1cb33e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.466130] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 53f1f228f4f8401da361d30794ed0309 [ 1561.486636] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-17e10e78-752c-4810-aaf7-a363cb7ae39a tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ba23ede2-be42-48ac-b281-571ccd158dee could not be found. [ 1561.486844] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-17e10e78-752c-4810-aaf7-a363cb7ae39a tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1561.487037] env[62740]: INFO nova.compute.manager [None req-17e10e78-752c-4810-aaf7-a363cb7ae39a tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1561.487288] env[62740]: DEBUG oslo.service.loopingcall [None req-17e10e78-752c-4810-aaf7-a363cb7ae39a tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1561.489866] env[62740]: DEBUG nova.compute.manager [-] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1561.489973] env[62740]: DEBUG nova.network.neutron [-] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1561.510447] env[62740]: DEBUG nova.network.neutron [-] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1561.511062] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e955736823a648f1baa6b836bc770972 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1561.518812] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e955736823a648f1baa6b836bc770972 [ 1561.519225] env[62740]: DEBUG nova.network.neutron [-] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1561.519662] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c903e4afb2034a6caf3e126c119f2b95 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1561.529934] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c903e4afb2034a6caf3e126c119f2b95 [ 1561.530435] env[62740]: INFO nova.compute.manager [-] [instance: ba23ede2-be42-48ac-b281-571ccd158dee] Took 0.04 seconds to deallocate network for instance. 
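[editor's note] The "Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return" entry above is oslo.service's looping-call machinery driving a retried network teardown. A minimal sketch of that retry pattern follows, assuming oslo.service's RetryDecorator API; the function body and its exception list are illustrative, not Nova's actual code.

    # Retry a flaky cleanup step with increasing back-off; this is the
    # mechanism behind the loopingcall.py:435 trace in the log above.
    from oslo_service import loopingcall

    @loopingcall.RetryDecorator(
        max_retry_count=3,      # stop after three retries
        inc_sleep_time=2,       # sleep 2s longer before each retry
        max_sleep_time=12,
        exceptions=(ConnectionError,))  # only these trigger a retry
    def _deallocate_network_with_retries():
        # The real body releases the instance's Neutron ports; raising
        # a listed exception schedules another attempt.
        ...

    _deallocate_network_with_retries()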
[ 1561.534351] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-17e10e78-752c-4810-aaf7-a363cb7ae39a tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg 015fc78b8c314f7d8fb4b32c45303ea4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1561.567163] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 015fc78b8c314f7d8fb4b32c45303ea4 [ 1561.580744] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-17e10e78-752c-4810-aaf7-a363cb7ae39a tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg bde7dc5ab24145c1b441c3c2d4171015 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1561.623916] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bde7dc5ab24145c1b441c3c2d4171015 [ 1561.626905] env[62740]: DEBUG oslo_concurrency.lockutils [None req-17e10e78-752c-4810-aaf7-a363cb7ae39a tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Lock "ba23ede2-be42-48ac-b281-571ccd158dee" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.292s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1561.627047] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-17e10e78-752c-4810-aaf7-a363cb7ae39a tempest-ServerShowV247Test-1392334177 tempest-ServerShowV247Test-1392334177-project-member] Expecting reply to msg c63230a09dbc4f5fa4da5f7469f8af46 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1561.630379] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ede781d-2e32-43b0-865b-a17b77c4bd38 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.637835] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29e5e883-7cfd-415e-854b-70c24e45ed38 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.641095] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c63230a09dbc4f5fa4da5f7469f8af46 [ 1561.669659] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5813d87b-c3da-49d7-8bfb-867112ea69fa {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.677097] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa76c047-3f10-4946-a591-f5c63266aabc {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.691108] env[62740]: DEBUG nova.compute.provider_tree [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1561.692072] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg 409decdfc54a4ca598c2fcc177c7b55b in queue 
reply_30cb6e3d754a4ebf9cedab7950709402 [ 1561.701203] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 409decdfc54a4ca598c2fcc177c7b55b [ 1561.702266] env[62740]: DEBUG nova.scheduler.client.report [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1561.704518] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg f0e9acb1f25e4ef4993845142ce63a9a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1561.716973] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f0e9acb1f25e4ef4993845142ce63a9a [ 1561.717771] env[62740]: DEBUG oslo_concurrency.lockutils [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.312s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1561.718270] env[62740]: DEBUG nova.compute.manager [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Start building networks asynchronously for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1561.719975] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg e6bea4150ca84e42a26c803c703c53f7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1561.753030] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e6bea4150ca84e42a26c803c703c53f7 [ 1561.754787] env[62740]: DEBUG nova.compute.utils [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1561.755410] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg 331651dca4f949dfb52dac10a291d187 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1561.759455] env[62740]: DEBUG nova.compute.manager [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Allocating IP information in the background. 
{{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1561.759455] env[62740]: DEBUG nova.network.neutron [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1561.768571] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 331651dca4f949dfb52dac10a291d187 [ 1561.769167] env[62740]: DEBUG nova.compute.manager [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Start building block device mappings for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1561.770784] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg 307edce5f32e4ab38a20799916d5ea7e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1561.799482] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 307edce5f32e4ab38a20799916d5ea7e [ 1561.802620] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg e066f269106947e292e67f24d0179c31 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1561.832389] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e066f269106947e292e67f24d0179c31 [ 1561.833299] env[62740]: DEBUG nova.compute.manager [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Start spawning the instance on the hypervisor. 
{{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1561.854438] env[62740]: DEBUG nova.policy [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '92b7913efc094fd090cd51f76f3eaf4b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1362f27348894a139cf80a8ea6449984', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 1561.861396] env[62740]: DEBUG nova.virt.hardware [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1561.861396] env[62740]: DEBUG nova.virt.hardware [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1561.861396] env[62740]: DEBUG nova.virt.hardware [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1561.861580] env[62740]: DEBUG nova.virt.hardware [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1561.861580] env[62740]: DEBUG nova.virt.hardware [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1561.861580] env[62740]: DEBUG nova.virt.hardware [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1561.861580] env[62740]: DEBUG nova.virt.hardware [None 
req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1561.861580] env[62740]: DEBUG nova.virt.hardware [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1561.861902] env[62740]: DEBUG nova.virt.hardware [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1561.862207] env[62740]: DEBUG nova.virt.hardware [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1561.862499] env[62740]: DEBUG nova.virt.hardware [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1561.863626] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7083018-5a8f-4721-a57d-2c31a2dd4b28 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.873869] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f6e33f-9404-4a76-b759-5f3d8d304255 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.890403] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1562.224582] env[62740]: DEBUG nova.network.neutron [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Successfully created port: 25a65650-3dc5-4551-b9f2-5270d03f3358 {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1562.524563] env[62740]: DEBUG nova.network.neutron [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Successfully created port: cbd60736-f033-4d5c-b1ae-9d515b1bd537 {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1562.945734] env[62740]: DEBUG nova.network.neutron [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] 
Successfully created port: e868c59c-4dd6-411f-8539-4108330238df {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1563.587681] env[62740]: DEBUG nova.compute.manager [req-c30e3bf0-1656-4979-9c97-342aa977eef4 req-8e34a0f1-219e-4d9c-a772-293fdd922b81 service nova] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Received event network-vif-plugged-25a65650-3dc5-4551-b9f2-5270d03f3358 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1563.588262] env[62740]: DEBUG oslo_concurrency.lockutils [req-c30e3bf0-1656-4979-9c97-342aa977eef4 req-8e34a0f1-219e-4d9c-a772-293fdd922b81 service nova] Acquiring lock "a41506d2-33b2-40b8-badb-41312c7abbd2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1563.588483] env[62740]: DEBUG oslo_concurrency.lockutils [req-c30e3bf0-1656-4979-9c97-342aa977eef4 req-8e34a0f1-219e-4d9c-a772-293fdd922b81 service nova] Lock "a41506d2-33b2-40b8-badb-41312c7abbd2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1563.588787] env[62740]: DEBUG oslo_concurrency.lockutils [req-c30e3bf0-1656-4979-9c97-342aa977eef4 req-8e34a0f1-219e-4d9c-a772-293fdd922b81 service nova] Lock "a41506d2-33b2-40b8-badb-41312c7abbd2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1563.588983] env[62740]: DEBUG nova.compute.manager [req-c30e3bf0-1656-4979-9c97-342aa977eef4 req-8e34a0f1-219e-4d9c-a772-293fdd922b81 service nova] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] No waiting events found dispatching network-vif-plugged-25a65650-3dc5-4551-b9f2-5270d03f3358 {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1563.589328] env[62740]: WARNING nova.compute.manager [req-c30e3bf0-1656-4979-9c97-342aa977eef4 req-8e34a0f1-219e-4d9c-a772-293fdd922b81 service nova] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Received unexpected event network-vif-plugged-25a65650-3dc5-4551-b9f2-5270d03f3358 for instance with vm_state building and task_state spawning. 
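[editor's note] The nova.virt.hardware lines above walk a small search: with no flavor or image constraints (the "0:0:0" limits mean unconstrained), one vCPU factors into exactly one (sockets, cores, threads) triple, hence "Got 1 possible topologies". A rough, self-contained sketch of that enumeration; possible_topologies is a hypothetical helper, not hardware.py's real function.

    # Enumerate every sockets*cores*threads factorization of the vCPU
    # count under the 65536 default caps seen in the log.
    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        divisors = [d for d in range(1, vcpus + 1) if vcpus % d == 0]
        return [(s, c, t) for s, c, t in product(divisors, repeat=3)
                if s * c * t == vcpus
                and s <= max_sockets and c <= max_cores and t <= max_threads]

    print(possible_topologies(1))  # [(1, 1, 1)] -- the log's one topology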
[ 1563.669406] env[62740]: DEBUG nova.network.neutron [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Successfully updated port: 25a65650-3dc5-4551-b9f2-5270d03f3358 {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1563.670412] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg 4231b507896a444c83dad75863a8ed8c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1563.682383] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4231b507896a444c83dad75863a8ed8c [ 1564.633381] env[62740]: DEBUG nova.network.neutron [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Successfully updated port: cbd60736-f033-4d5c-b1ae-9d515b1bd537 {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1564.633381] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg f7d05e9f870b4a2f9fe0164cf029dc4c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1564.643776] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f7d05e9f870b4a2f9fe0164cf029dc4c [ 1565.331593] env[62740]: DEBUG nova.network.neutron [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Successfully updated port: e868c59c-4dd6-411f-8539-4108330238df {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1565.331593] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg 5e391c43fb1c48a8ae435fe4a6bc3c65 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1565.341109] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e391c43fb1c48a8ae435fe4a6bc3c65 [ 1565.341817] env[62740]: DEBUG oslo_concurrency.lockutils [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Acquiring lock "refresh_cache-a41506d2-33b2-40b8-badb-41312c7abbd2" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1565.341961] env[62740]: DEBUG oslo_concurrency.lockutils [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Acquired lock "refresh_cache-a41506d2-33b2-40b8-badb-41312c7abbd2" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1565.342125] env[62740]: DEBUG nova.network.neutron [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Building network info cache for instance {{(pid=62740) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2010}} [ 1565.342508] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg 6a6c8964729245eda26f08377e493d0e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1565.353216] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6a6c8964729245eda26f08377e493d0e [ 1565.409282] env[62740]: DEBUG nova.network.neutron [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1565.621487] env[62740]: DEBUG nova.compute.manager [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Received event network-changed-25a65650-3dc5-4551-b9f2-5270d03f3358 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1565.621778] env[62740]: DEBUG nova.compute.manager [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Refreshing instance network info cache due to event network-changed-25a65650-3dc5-4551-b9f2-5270d03f3358. {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1565.622297] env[62740]: DEBUG oslo_concurrency.lockutils [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] Acquiring lock "refresh_cache-a41506d2-33b2-40b8-badb-41312c7abbd2" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1566.038604] env[62740]: DEBUG nova.network.neutron [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Updating instance_info_cache with network_info: [{"id": "25a65650-3dc5-4551-b9f2-5270d03f3358", "address": "fa:16:3e:a7:e3:6a", "network": {"id": "3b2df5ae-8737-41ea-b17b-13aa6bd8544f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1832914782", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1362f27348894a139cf80a8ea6449984", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ff3ecd2f-0b10-4faf-a512-fd7a20c28df1", "external-id": "nsx-vlan-transportzone-291", "segmentation_id": 291, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25a65650-3d", "ovs_interfaceid": "25a65650-3dc5-4551-b9f2-5270d03f3358", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "cbd60736-f033-4d5c-b1ae-9d515b1bd537", "address": "fa:16:3e:d0:3e:db", "network": {"id": "e17030eb-f674-4a7c-9855-a5efc3a30ea2", "bridge": 
"br-int", "label": "tempest-ServersTestMultiNic-1833387005", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.60", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "1362f27348894a139cf80a8ea6449984", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0c7eaa8-06f3-40c3-93ae-7593486eb870", "external-id": "nsx-vlan-transportzone-20", "segmentation_id": 20, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbd60736-f0", "ovs_interfaceid": "cbd60736-f033-4d5c-b1ae-9d515b1bd537", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e868c59c-4dd6-411f-8539-4108330238df", "address": "fa:16:3e:4e:07:99", "network": {"id": "3b2df5ae-8737-41ea-b17b-13aa6bd8544f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1832914782", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.90", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1362f27348894a139cf80a8ea6449984", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ff3ecd2f-0b10-4faf-a512-fd7a20c28df1", "external-id": "nsx-vlan-transportzone-291", "segmentation_id": 291, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape868c59c-4d", "ovs_interfaceid": "e868c59c-4dd6-411f-8539-4108330238df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1566.039238] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg f03435db29e248388ea1e0d16e3bd57a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1566.054163] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f03435db29e248388ea1e0d16e3bd57a [ 1566.054782] env[62740]: DEBUG oslo_concurrency.lockutils [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Releasing lock "refresh_cache-a41506d2-33b2-40b8-badb-41312c7abbd2" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1566.055153] env[62740]: DEBUG nova.compute.manager [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Instance network_info: |[{"id": "25a65650-3dc5-4551-b9f2-5270d03f3358", "address": "fa:16:3e:a7:e3:6a", "network": {"id": "3b2df5ae-8737-41ea-b17b-13aa6bd8544f", "bridge": "br-int", "label": 
"tempest-ServersTestMultiNic-1832914782", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1362f27348894a139cf80a8ea6449984", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ff3ecd2f-0b10-4faf-a512-fd7a20c28df1", "external-id": "nsx-vlan-transportzone-291", "segmentation_id": 291, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25a65650-3d", "ovs_interfaceid": "25a65650-3dc5-4551-b9f2-5270d03f3358", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "cbd60736-f033-4d5c-b1ae-9d515b1bd537", "address": "fa:16:3e:d0:3e:db", "network": {"id": "e17030eb-f674-4a7c-9855-a5efc3a30ea2", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1833387005", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.60", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "1362f27348894a139cf80a8ea6449984", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0c7eaa8-06f3-40c3-93ae-7593486eb870", "external-id": "nsx-vlan-transportzone-20", "segmentation_id": 20, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbd60736-f0", "ovs_interfaceid": "cbd60736-f033-4d5c-b1ae-9d515b1bd537", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e868c59c-4dd6-411f-8539-4108330238df", "address": "fa:16:3e:4e:07:99", "network": {"id": "3b2df5ae-8737-41ea-b17b-13aa6bd8544f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1832914782", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.90", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1362f27348894a139cf80a8ea6449984", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ff3ecd2f-0b10-4faf-a512-fd7a20c28df1", "external-id": "nsx-vlan-transportzone-291", "segmentation_id": 291, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape868c59c-4d", "ovs_interfaceid": "e868c59c-4dd6-411f-8539-4108330238df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1566.055461] env[62740]: DEBUG oslo_concurrency.lockutils [req-dad66845-13b9-4d88-a441-9c66ee20c7fb 
req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] Acquired lock "refresh_cache-a41506d2-33b2-40b8-badb-41312c7abbd2" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1566.055644] env[62740]: DEBUG nova.network.neutron [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Refreshing network info cache for port 25a65650-3dc5-4551-b9f2-5270d03f3358 {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1566.056060] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] Expecting reply to msg d42ccce11e42476286e37893e342d9e7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1566.057325] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a7:e3:6a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ff3ecd2f-0b10-4faf-a512-fd7a20c28df1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '25a65650-3dc5-4551-b9f2-5270d03f3358', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:d0:3e:db', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f0c7eaa8-06f3-40c3-93ae-7593486eb870', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cbd60736-f033-4d5c-b1ae-9d515b1bd537', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:4e:07:99', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ff3ecd2f-0b10-4faf-a512-fd7a20c28df1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e868c59c-4dd6-411f-8539-4108330238df', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1566.068109] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Creating folder: Project (1362f27348894a139cf80a8ea6449984). Parent ref: group-v156037. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1566.068910] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d42ccce11e42476286e37893e342d9e7 [ 1566.069305] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0036ec5f-0116-4f8e-9f1a-88e3688c5921 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.084234] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Created folder: Project (1362f27348894a139cf80a8ea6449984) in parent group-v156037. [ 1566.085042] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Creating folder: Instances. Parent ref: group-v156166. 
{{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1566.085042] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f65f7f8c-be0b-4896-bba1-d91a5cc5636b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.094913] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Created folder: Instances in parent group-v156166. [ 1566.094913] env[62740]: DEBUG oslo.service.loopingcall [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1566.094913] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1566.094913] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4c558a6f-46ae-47a0-8515-8125daa641c7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.120187] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1566.120187] env[62740]: value = "task-640302" [ 1566.120187] env[62740]: _type = "Task" [ 1566.120187] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.127843] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640302, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.401712] env[62740]: DEBUG nova.network.neutron [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Updated VIF entry in instance network info cache for port 25a65650-3dc5-4551-b9f2-5270d03f3358. 
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1566.406123] env[62740]: DEBUG nova.network.neutron [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Updating instance_info_cache with network_info: [{"id": "25a65650-3dc5-4551-b9f2-5270d03f3358", "address": "fa:16:3e:a7:e3:6a", "network": {"id": "3b2df5ae-8737-41ea-b17b-13aa6bd8544f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1832914782", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1362f27348894a139cf80a8ea6449984", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ff3ecd2f-0b10-4faf-a512-fd7a20c28df1", "external-id": "nsx-vlan-transportzone-291", "segmentation_id": 291, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25a65650-3d", "ovs_interfaceid": "25a65650-3dc5-4551-b9f2-5270d03f3358", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "cbd60736-f033-4d5c-b1ae-9d515b1bd537", "address": "fa:16:3e:d0:3e:db", "network": {"id": "e17030eb-f674-4a7c-9855-a5efc3a30ea2", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1833387005", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.60", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "1362f27348894a139cf80a8ea6449984", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0c7eaa8-06f3-40c3-93ae-7593486eb870", "external-id": "nsx-vlan-transportzone-20", "segmentation_id": 20, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbd60736-f0", "ovs_interfaceid": "cbd60736-f033-4d5c-b1ae-9d515b1bd537", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e868c59c-4dd6-411f-8539-4108330238df", "address": "fa:16:3e:4e:07:99", "network": {"id": "3b2df5ae-8737-41ea-b17b-13aa6bd8544f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1832914782", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.90", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1362f27348894a139cf80a8ea6449984", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ff3ecd2f-0b10-4faf-a512-fd7a20c28df1", "external-id": "nsx-vlan-transportzone-291", 
"segmentation_id": 291, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape868c59c-4d", "ovs_interfaceid": "e868c59c-4dd6-411f-8539-4108330238df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1566.406123] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] Expecting reply to msg 4b4b226b4e3549a3862d52fda4cf122a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1566.413231] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4b4b226b4e3549a3862d52fda4cf122a [ 1566.414197] env[62740]: DEBUG oslo_concurrency.lockutils [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] Releasing lock "refresh_cache-a41506d2-33b2-40b8-badb-41312c7abbd2" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1566.414197] env[62740]: DEBUG nova.compute.manager [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Received event network-vif-plugged-cbd60736-f033-4d5c-b1ae-9d515b1bd537 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1566.414365] env[62740]: DEBUG oslo_concurrency.lockutils [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] Acquiring lock "a41506d2-33b2-40b8-badb-41312c7abbd2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1566.414564] env[62740]: DEBUG oslo_concurrency.lockutils [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] Lock "a41506d2-33b2-40b8-badb-41312c7abbd2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1566.414722] env[62740]: DEBUG oslo_concurrency.lockutils [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] Lock "a41506d2-33b2-40b8-badb-41312c7abbd2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1566.414886] env[62740]: DEBUG nova.compute.manager [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] No waiting events found dispatching network-vif-plugged-cbd60736-f033-4d5c-b1ae-9d515b1bd537 {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1566.415224] env[62740]: WARNING nova.compute.manager [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Received unexpected event network-vif-plugged-cbd60736-f033-4d5c-b1ae-9d515b1bd537 for instance with vm_state building and task_state spawning. 
[ 1566.415440] env[62740]: DEBUG nova.compute.manager [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Received event network-changed-cbd60736-f033-4d5c-b1ae-9d515b1bd537 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1566.416084] env[62740]: DEBUG nova.compute.manager [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Refreshing instance network info cache due to event network-changed-cbd60736-f033-4d5c-b1ae-9d515b1bd537. {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1566.416084] env[62740]: DEBUG oslo_concurrency.lockutils [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] Acquiring lock "refresh_cache-a41506d2-33b2-40b8-badb-41312c7abbd2" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1566.416084] env[62740]: DEBUG oslo_concurrency.lockutils [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] Acquired lock "refresh_cache-a41506d2-33b2-40b8-badb-41312c7abbd2" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1566.416084] env[62740]: DEBUG nova.network.neutron [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Refreshing network info cache for port cbd60736-f033-4d5c-b1ae-9d515b1bd537 {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1566.416658] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] Expecting reply to msg 40f6922c9294449085268d78a7731e45 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1566.427308] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 40f6922c9294449085268d78a7731e45 [ 1566.640709] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640302, 'name': CreateVM_Task, 'duration_secs': 0.379275} completed successfully. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1566.640889] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1566.642032] env[62740]: DEBUG oslo_concurrency.lockutils [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1566.642298] env[62740]: DEBUG oslo_concurrency.lockutils [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1566.642620] env[62740]: DEBUG oslo_concurrency.lockutils [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1566.642886] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3edc1b1-fcf3-47d5-994c-db9c11d6f535 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.651020] env[62740]: DEBUG oslo_vmware.api [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Waiting for the task: (returnval){ [ 1566.651020] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52dcd99c-274f-33e2-a75b-04351c917670" [ 1566.651020] env[62740]: _type = "Task" [ 1566.651020] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.655528] env[62740]: DEBUG oslo_vmware.api [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52dcd99c-274f-33e2-a75b-04351c917670, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.017297] env[62740]: DEBUG nova.network.neutron [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Updated VIF entry in instance network info cache for port cbd60736-f033-4d5c-b1ae-9d515b1bd537. 
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1567.017792] env[62740]: DEBUG nova.network.neutron [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Updating instance_info_cache with network_info: [{"id": "25a65650-3dc5-4551-b9f2-5270d03f3358", "address": "fa:16:3e:a7:e3:6a", "network": {"id": "3b2df5ae-8737-41ea-b17b-13aa6bd8544f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1832914782", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1362f27348894a139cf80a8ea6449984", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ff3ecd2f-0b10-4faf-a512-fd7a20c28df1", "external-id": "nsx-vlan-transportzone-291", "segmentation_id": 291, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25a65650-3d", "ovs_interfaceid": "25a65650-3dc5-4551-b9f2-5270d03f3358", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "cbd60736-f033-4d5c-b1ae-9d515b1bd537", "address": "fa:16:3e:d0:3e:db", "network": {"id": "e17030eb-f674-4a7c-9855-a5efc3a30ea2", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1833387005", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.60", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "1362f27348894a139cf80a8ea6449984", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0c7eaa8-06f3-40c3-93ae-7593486eb870", "external-id": "nsx-vlan-transportzone-20", "segmentation_id": 20, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbd60736-f0", "ovs_interfaceid": "cbd60736-f033-4d5c-b1ae-9d515b1bd537", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e868c59c-4dd6-411f-8539-4108330238df", "address": "fa:16:3e:4e:07:99", "network": {"id": "3b2df5ae-8737-41ea-b17b-13aa6bd8544f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1832914782", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.90", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1362f27348894a139cf80a8ea6449984", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ff3ecd2f-0b10-4faf-a512-fd7a20c28df1", "external-id": "nsx-vlan-transportzone-291", 
"segmentation_id": 291, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape868c59c-4d", "ovs_interfaceid": "e868c59c-4dd6-411f-8539-4108330238df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1567.018412] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] Expecting reply to msg f1db1fe94742463eb7dff693afd2dfee in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1567.028073] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f1db1fe94742463eb7dff693afd2dfee [ 1567.028752] env[62740]: DEBUG oslo_concurrency.lockutils [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] Releasing lock "refresh_cache-a41506d2-33b2-40b8-badb-41312c7abbd2" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1567.028988] env[62740]: DEBUG nova.compute.manager [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Received event network-vif-plugged-e868c59c-4dd6-411f-8539-4108330238df {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1567.029387] env[62740]: DEBUG oslo_concurrency.lockutils [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] Acquiring lock "a41506d2-33b2-40b8-badb-41312c7abbd2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1567.029651] env[62740]: DEBUG oslo_concurrency.lockutils [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] Lock "a41506d2-33b2-40b8-badb-41312c7abbd2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1567.029809] env[62740]: DEBUG oslo_concurrency.lockutils [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] Lock "a41506d2-33b2-40b8-badb-41312c7abbd2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1567.029974] env[62740]: DEBUG nova.compute.manager [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] No waiting events found dispatching network-vif-plugged-e868c59c-4dd6-411f-8539-4108330238df {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1567.030160] env[62740]: WARNING nova.compute.manager [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Received unexpected event network-vif-plugged-e868c59c-4dd6-411f-8539-4108330238df for instance with vm_state building and task_state spawning. 
[ 1567.030327] env[62740]: DEBUG nova.compute.manager [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Received event network-changed-e868c59c-4dd6-411f-8539-4108330238df {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1567.030486] env[62740]: DEBUG nova.compute.manager [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Refreshing instance network info cache due to event network-changed-e868c59c-4dd6-411f-8539-4108330238df. {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1567.030673] env[62740]: DEBUG oslo_concurrency.lockutils [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] Acquiring lock "refresh_cache-a41506d2-33b2-40b8-badb-41312c7abbd2" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1567.030812] env[62740]: DEBUG oslo_concurrency.lockutils [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] Acquired lock "refresh_cache-a41506d2-33b2-40b8-badb-41312c7abbd2" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1567.030970] env[62740]: DEBUG nova.network.neutron [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Refreshing network info cache for port e868c59c-4dd6-411f-8539-4108330238df {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1567.031506] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] Expecting reply to msg bed49ab922494d6e9f9f78ea2b909ec1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1567.040060] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bed49ab922494d6e9f9f78ea2b909ec1 [ 1567.159951] env[62740]: DEBUG oslo_concurrency.lockutils [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1567.160220] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1567.160421] env[62740]: DEBUG oslo_concurrency.lockutils [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1567.323251] env[62740]: DEBUG nova.network.neutron [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] 
[instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Updated VIF entry in instance network info cache for port e868c59c-4dd6-411f-8539-4108330238df. {{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1567.323789] env[62740]: DEBUG nova.network.neutron [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Updating instance_info_cache with network_info: [{"id": "25a65650-3dc5-4551-b9f2-5270d03f3358", "address": "fa:16:3e:a7:e3:6a", "network": {"id": "3b2df5ae-8737-41ea-b17b-13aa6bd8544f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1832914782", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1362f27348894a139cf80a8ea6449984", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ff3ecd2f-0b10-4faf-a512-fd7a20c28df1", "external-id": "nsx-vlan-transportzone-291", "segmentation_id": 291, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25a65650-3d", "ovs_interfaceid": "25a65650-3dc5-4551-b9f2-5270d03f3358", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "cbd60736-f033-4d5c-b1ae-9d515b1bd537", "address": "fa:16:3e:d0:3e:db", "network": {"id": "e17030eb-f674-4a7c-9855-a5efc3a30ea2", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1833387005", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.60", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "1362f27348894a139cf80a8ea6449984", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0c7eaa8-06f3-40c3-93ae-7593486eb870", "external-id": "nsx-vlan-transportzone-20", "segmentation_id": 20, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbd60736-f0", "ovs_interfaceid": "cbd60736-f033-4d5c-b1ae-9d515b1bd537", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e868c59c-4dd6-411f-8539-4108330238df", "address": "fa:16:3e:4e:07:99", "network": {"id": "3b2df5ae-8737-41ea-b17b-13aa6bd8544f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1832914782", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.90", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1362f27348894a139cf80a8ea6449984", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "ff3ecd2f-0b10-4faf-a512-fd7a20c28df1", "external-id": "nsx-vlan-transportzone-291", "segmentation_id": 291, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape868c59c-4d", "ovs_interfaceid": "e868c59c-4dd6-411f-8539-4108330238df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1567.324425] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] Expecting reply to msg 55841d402df145ccb7bce2eb6b9caa60 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1567.333159] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 55841d402df145ccb7bce2eb6b9caa60 [ 1567.333791] env[62740]: DEBUG oslo_concurrency.lockutils [req-dad66845-13b9-4d88-a441-9c66ee20c7fb req-1684c840-e9ab-48c9-bafa-edd0e7cde17e service nova] Releasing lock "refresh_cache-a41506d2-33b2-40b8-badb-41312c7abbd2" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1568.972276] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Acquiring lock "c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1568.972593] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Lock "c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1570.444047] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-681ae6d4-3a31-4a41-806f-7f84608cc6a0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg 6435497be30f4f26adbe0e2e6950a98d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1570.456898] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6435497be30f4f26adbe0e2e6950a98d [ 1570.457453] env[62740]: DEBUG oslo_concurrency.lockutils [None req-681ae6d4-3a31-4a41-806f-7f84608cc6a0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Acquiring lock "a41506d2-33b2-40b8-badb-41312c7abbd2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1587.495971] env[62740]: DEBUG oslo_concurrency.lockutils [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Acquiring lock "1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1587.496308] env[62740]: DEBUG oslo_concurrency.lockutils [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Lock "1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1606.601147] env[62740]: WARNING oslo_vmware.rw_handles [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1606.601147] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1606.601147] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1606.601147] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1606.601147] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1606.601147] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 1606.601147] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1606.601147] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1606.601147] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1606.601147] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1606.601147] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1606.601147] env[62740]: ERROR oslo_vmware.rw_handles [ 1606.601858] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/a117e2d6-03e3-4922-9419-386c171201df/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1606.603493] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1606.603736] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Copying Virtual Disk [datastore2] vmware_temp/a117e2d6-03e3-4922-9419-386c171201df/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore2] vmware_temp/a117e2d6-03e3-4922-9419-386c171201df/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1606.604055] 
env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8171aead-9eb1-463a-9ec3-5784e1e27c13 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.612151] env[62740]: DEBUG oslo_vmware.api [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Waiting for the task: (returnval){ [ 1606.612151] env[62740]: value = "task-640303" [ 1606.612151] env[62740]: _type = "Task" [ 1606.612151] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.619899] env[62740]: DEBUG oslo_vmware.api [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Task: {'id': task-640303, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.123086] env[62740]: DEBUG oslo_vmware.exceptions [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Fault InvalidArgument not matched. {{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1607.123368] env[62740]: DEBUG oslo_concurrency.lockutils [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1607.123951] env[62740]: ERROR nova.compute.manager [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1607.123951] env[62740]: Faults: ['InvalidArgument'] [ 1607.123951] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Traceback (most recent call last): [ 1607.123951] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1607.123951] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] yield resources [ 1607.123951] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1607.123951] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] self.driver.spawn(context, instance, image_meta, [ 1607.123951] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1607.123951] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1607.123951] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1607.123951] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] self._fetch_image_if_missing(context, vi) [ 1607.123951] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1607.123951] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] image_cache(vi, tmp_image_ds_loc) [ 1607.123951] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1607.123951] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] vm_util.copy_virtual_disk( [ 1607.123951] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1607.123951] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] session._wait_for_task(vmdk_copy_task) [ 1607.123951] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1607.123951] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] return self.wait_for_task(task_ref) [ 1607.123951] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1607.123951] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] return evt.wait() [ 1607.123951] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1607.123951] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] result = hub.switch() [ 1607.123951] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1607.123951] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] return self.greenlet.switch() [ 1607.123951] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1607.123951] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] self.f(*self.args, **self.kw) [ 1607.123951] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1607.123951] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] raise exceptions.translate_fault(task_info.error) [ 1607.123951] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1607.123951] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Faults: ['InvalidArgument'] [ 1607.123951] 
env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] [ 1607.125086] env[62740]: INFO nova.compute.manager [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Terminating instance [ 1607.125846] env[62740]: DEBUG oslo_concurrency.lockutils [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1607.126064] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1607.126304] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-648ff29d-5695-4daa-89fc-98361f7a0254 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.128496] env[62740]: DEBUG nova.compute.manager [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1607.128720] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1607.129473] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5710e5b-817d-4cff-abf9-ab6224d7abb3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.136230] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1607.136454] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d22be9a3-bd4c-4127-96e1-2c0c82185e8b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.138695] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1607.138886] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 
tempest-AttachVolumeNegativeTest-1769210663-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1607.139888] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-831615e8-9f6e-413f-9005-7ca91079e8d2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.144748] env[62740]: DEBUG oslo_vmware.api [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Waiting for the task: (returnval){ [ 1607.144748] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52f1b7b7-9155-775a-5a82-9b986ef5119a" [ 1607.144748] env[62740]: _type = "Task" [ 1607.144748] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1607.157531] env[62740]: DEBUG oslo_vmware.api [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52f1b7b7-9155-775a-5a82-9b986ef5119a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.203418] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1607.203653] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1607.203840] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Deleting the datastore file [datastore2] 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7 {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1607.204237] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6c545908-c616-4cdc-9935-1509de0744bb {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.210158] env[62740]: DEBUG oslo_vmware.api [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Waiting for the task: (returnval){ [ 1607.210158] env[62740]: value = "task-640305" [ 1607.210158] env[62740]: _type = "Task" [ 1607.210158] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1607.218311] env[62740]: DEBUG oslo_vmware.api [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Task: {'id': task-640305, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.655285] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1607.655637] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Creating directory with path [datastore2] vmware_temp/cf17bf4c-bec1-49ed-ab4d-70b85ddeeabf/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1607.655791] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bc397b08-3bf3-4dd6-8a55-1a858f026c7a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.667320] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Created directory with path [datastore2] vmware_temp/cf17bf4c-bec1-49ed-ab4d-70b85ddeeabf/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1607.667529] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Fetch image to [datastore2] vmware_temp/cf17bf4c-bec1-49ed-ab4d-70b85ddeeabf/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1607.667721] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/cf17bf4c-bec1-49ed-ab4d-70b85ddeeabf/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1607.668496] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11a7c982-732f-49b9-95b3-e455ffcf14be {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.675519] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e570b61-31c5-4430-8f1b-df10c96a0608 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.684609] env[62740]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15935f02-8b9c-4754-a5a9-41c28ea20ec2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.716852] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10b01c1d-27bf-4b5d-ab15-993cc11cfa4c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.725097] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-74a032e7-9cd1-43e7-bd5f-b711d965da5d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.726747] env[62740]: DEBUG oslo_vmware.api [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Task: {'id': task-640305, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.066552} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1607.726987] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1607.727188] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1607.727362] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1607.727560] env[62740]: INFO nova.compute.manager [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Took 0.60 seconds to destroy the instance on the hypervisor. 
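The failure sequence above (CopyVirtualDisk_Task submitted, `_poll_task` raising `translate_fault`, and the resulting `VimFaultException: … Faults: ['InvalidArgument']`) is the standard oslo.vmware task-wait pattern. A minimal sketch of that pattern, with placeholder vCenter credentials and datastore paths (everything quoted below is an assumption, not taken from this deployment):

    # Minimal sketch (placeholder host/credentials/paths) of the oslo.vmware
    # pattern seen above: submit a vCenter task, block on wait_for_task(),
    # and handle a task-level fault such as InvalidArgument.
    from oslo_vmware import api, exceptions

    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    try:
        task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task',
            session.vim.service_content.virtualDiskManager,
            sourceName='[datastore2] demo/tmp-sparse.vmdk',
            destName='[datastore2] demo/demo.vmdk')
        # Polls the task until it succeeds; on error it raises the fault
        # translated from task_info.error, as in the traceback above.
        session.wait_for_task(task)
    except exceptions.VimFaultException as exc:
        # exc.fault_list is what the log renders as "Faults: ['InvalidArgument']"
        print('copy failed:', exc.fault_list, exc)

Nova's `vm_util.copy_virtual_disk` wraps exactly this call, which is why the same fault surfaces twice in the log: once from `rw_handles`/`_poll_task` and once re-raised through `_build_and_run_instance`.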
[ 1607.729685] env[62740]: DEBUG nova.compute.claims [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1607.729875] env[62740]: DEBUG oslo_concurrency.lockutils [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1607.730114] env[62740]: DEBUG oslo_concurrency.lockutils [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1607.732056] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Expecting reply to msg 966e242b493e4191bed996fd6d8c9f7f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1607.747549] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1607.775498] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 966e242b493e4191bed996fd6d8c9f7f [ 1607.798489] env[62740]: DEBUG oslo_vmware.rw_handles [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/cf17bf4c-bec1-49ed-ab4d-70b85ddeeabf/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1607.860715] env[62740]: DEBUG oslo_vmware.rw_handles [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Completed reading data from the image iterator. {{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1607.860902] env[62740]: DEBUG oslo_vmware.rw_handles [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/cf17bf4c-bec1-49ed-ab4d-70b85ddeeabf/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1608.001205] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6090476-b123-4b51-bd81-c7837518cb92 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.008424] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a6df6da-98b1-42c9-9539-d832a737e8b9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.038358] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4286df97-bb8b-49ab-841c-c0b435266fb5 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.047023] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab2aa8ca-e647-49ed-bb23-384acb0e4ae1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.058330] env[62740]: DEBUG nova.compute.provider_tree [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1608.058840] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Expecting reply to msg 3e59a46592d74508aeca4e2ec0f29e24 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1608.066093] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e59a46592d74508aeca4e2ec0f29e24 [ 1608.066957] env[62740]: DEBUG nova.scheduler.client.report [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1608.069225] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Expecting reply to msg b47918cf99774c2483a5547331af9dc2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1608.082256] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b47918cf99774c2483a5547331af9dc2 [ 1608.082939] env[62740]: DEBUG oslo_concurrency.lockutils [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.353s 
{{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1608.083456] env[62740]: ERROR nova.compute.manager [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1608.083456] env[62740]: Faults: ['InvalidArgument']
[ 1608.083456] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Traceback (most recent call last):
[ 1608.083456] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1608.083456] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] self.driver.spawn(context, instance, image_meta,
[ 1608.083456] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1608.083456] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1608.083456] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1608.083456] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] self._fetch_image_if_missing(context, vi)
[ 1608.083456] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1608.083456] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] image_cache(vi, tmp_image_ds_loc)
[ 1608.083456] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1608.083456] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] vm_util.copy_virtual_disk(
[ 1608.083456] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1608.083456] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] session._wait_for_task(vmdk_copy_task)
[ 1608.083456] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1608.083456] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] return self.wait_for_task(task_ref)
[ 1608.083456] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1608.083456] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] return evt.wait()
[ 1608.083456] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1608.083456] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] result = hub.switch()
[ 1608.083456] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1608.083456] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] return self.greenlet.switch()
[ 1608.083456] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1608.083456] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] self.f(*self.args, **self.kw)
[ 1608.083456] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1608.083456] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] raise exceptions.translate_fault(task_info.error)
[ 1608.083456] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1608.083456] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Faults: ['InvalidArgument']
[ 1608.083456] env[62740]: ERROR nova.compute.manager [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7]
[ 1608.084406] env[62740]: DEBUG nova.compute.utils [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1608.085500] env[62740]: DEBUG nova.compute.manager [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Build of instance 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7 was re-scheduled: A specified parameter was not correct: fileType
[ 1608.085500] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 1608.085878] env[62740]: DEBUG nova.compute.manager [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 1608.086065] env[62740]: DEBUG nova.compute.manager [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged.
{{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1608.086237] env[62740]: DEBUG nova.compute.manager [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1608.086400] env[62740]: DEBUG nova.network.neutron [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1608.521908] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Expecting reply to msg 76392c282d674605af8cfa89ba95a191 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1608.532701] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 76392c282d674605af8cfa89ba95a191 [ 1608.533231] env[62740]: DEBUG nova.network.neutron [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1608.533731] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Expecting reply to msg c84280fa00c94e228786e020079b36cf in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1608.547234] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c84280fa00c94e228786e020079b36cf [ 1608.547775] env[62740]: INFO nova.compute.manager [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Took 0.46 seconds to deallocate network for instance. 
[ 1608.549488] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Expecting reply to msg 8e77eeac892849bdb7a859ec0e19b27a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1608.585608] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e77eeac892849bdb7a859ec0e19b27a [ 1608.588793] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Expecting reply to msg f409debfcb61405d9177f9dfb102671b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1608.620384] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f409debfcb61405d9177f9dfb102671b [ 1608.642739] env[62740]: INFO nova.scheduler.client.report [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Deleted allocations for instance 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7 [ 1608.648965] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Expecting reply to msg 912ad174160e486f9dbacd7781407cfe in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1608.665820] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 912ad174160e486f9dbacd7781407cfe [ 1608.666448] env[62740]: DEBUG oslo_concurrency.lockutils [None req-72f312f4-5291-41a1-88aa-a397a2e5d305 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Lock "1ff3f5e9-284a-49b2-ad55-2c42f9b051c7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 474.824s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1608.667032] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 7407a34562da40119fabeec1d0fcd651 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1608.667827] env[62740]: DEBUG oslo_concurrency.lockutils [None req-07d2a2e6-0e56-427a-89d3-3075eb6935c1 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Lock "1ff3f5e9-284a-49b2-ad55-2c42f9b051c7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 278.834s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1608.668082] env[62740]: DEBUG oslo_concurrency.lockutils [None req-07d2a2e6-0e56-427a-89d3-3075eb6935c1 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Acquiring lock "1ff3f5e9-284a-49b2-ad55-2c42f9b051c7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1608.668309] env[62740]: DEBUG oslo_concurrency.lockutils [None req-07d2a2e6-0e56-427a-89d3-3075eb6935c1 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] 
Lock "1ff3f5e9-284a-49b2-ad55-2c42f9b051c7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1608.668478] env[62740]: DEBUG oslo_concurrency.lockutils [None req-07d2a2e6-0e56-427a-89d3-3075eb6935c1 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Lock "1ff3f5e9-284a-49b2-ad55-2c42f9b051c7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1608.670903] env[62740]: INFO nova.compute.manager [None req-07d2a2e6-0e56-427a-89d3-3075eb6935c1 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Terminating instance [ 1608.673626] env[62740]: DEBUG nova.compute.manager [None req-07d2a2e6-0e56-427a-89d3-3075eb6935c1 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1608.674020] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-07d2a2e6-0e56-427a-89d3-3075eb6935c1 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1608.674232] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a2971f49-14dc-4e2a-a0c6-1aa56d39b3a6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.679635] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7407a34562da40119fabeec1d0fcd651 [ 1608.680160] env[62740]: DEBUG nova.compute.manager [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1608.681833] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg aeacedf73a3a40e89693e2097ee4cb3b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1608.689238] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e059ca3-950b-4fae-a59a-dd12209ec279 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.716064] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-07d2a2e6-0e56-427a-89d3-3075eb6935c1 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7 could not be found. 
[ 1608.716282] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-07d2a2e6-0e56-427a-89d3-3075eb6935c1 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1608.716461] env[62740]: INFO nova.compute.manager [None req-07d2a2e6-0e56-427a-89d3-3075eb6935c1 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1608.716708] env[62740]: DEBUG oslo.service.loopingcall [None req-07d2a2e6-0e56-427a-89d3-3075eb6935c1 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1608.716932] env[62740]: DEBUG nova.compute.manager [-] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1608.717968] env[62740]: DEBUG nova.network.neutron [-] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1608.721055] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aeacedf73a3a40e89693e2097ee4cb3b [ 1608.738020] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1608.738020] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1608.738020] env[62740]: INFO nova.compute.claims [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1608.739043] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 9458765b8a414114b989a559896050c7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1608.741585] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 6ca99cbfd8a144b2ab739f3e939a8d2a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1608.747633] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6ca99cbfd8a144b2ab739f3e939a8d2a [ 1608.748088] env[62740]: DEBUG nova.network.neutron [-] [instance: 
1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1608.748529] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg d704564f78ae4725b30b12deee4d21b0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1608.761165] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d704564f78ae4725b30b12deee4d21b0 [ 1608.761165] env[62740]: INFO nova.compute.manager [-] [instance: 1ff3f5e9-284a-49b2-ad55-2c42f9b051c7] Took 0.04 seconds to deallocate network for instance. [ 1608.763309] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-07d2a2e6-0e56-427a-89d3-3075eb6935c1 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Expecting reply to msg 8f458e190b3b49da874865c4448640af in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1608.779009] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9458765b8a414114b989a559896050c7 [ 1608.779808] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 921278b932a349fa90cdcb30c3ab9008 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1608.785887] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 921278b932a349fa90cdcb30c3ab9008 [ 1608.789612] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f458e190b3b49da874865c4448640af [ 1608.803342] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-07d2a2e6-0e56-427a-89d3-3075eb6935c1 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Expecting reply to msg 9cd56794f380477d8d336608d62fb2d3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1608.838460] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9cd56794f380477d8d336608d62fb2d3 [ 1608.841523] env[62740]: DEBUG oslo_concurrency.lockutils [None req-07d2a2e6-0e56-427a-89d3-3075eb6935c1 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Lock "1ff3f5e9-284a-49b2-ad55-2c42f9b051c7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.174s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1608.841856] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-07d2a2e6-0e56-427a-89d3-3075eb6935c1 tempest-ServerMetadataTestJSON-348738397 tempest-ServerMetadataTestJSON-348738397-project-member] Expecting reply to msg 631cd4df1ce64666857bd58d3acb3a90 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1608.854401] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 631cd4df1ce64666857bd58d3acb3a90 [ 1608.924089] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5b372c6-3d1b-463f-8ae7-a9c621e99635 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.931908] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3f55307-3615-4d2e-ad4e-d1271d1808f5 {{(pid=62740) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.960414] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-173bc645-68f0-4cda-95c7-22c1231733b6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.967388] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b9a6284-bb8c-407d-86d8-cd89a8623b5e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.981031] env[62740]: DEBUG nova.compute.provider_tree [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1608.981534] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 21a296076c8a45ce8cf385823ec7d9d6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1608.989273] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 21a296076c8a45ce8cf385823ec7d9d6 [ 1608.990097] env[62740]: DEBUG nova.scheduler.client.report [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1608.992275] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 5320cdc7a6ae480ab1a66f7f4fd4e60e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1609.004595] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5320cdc7a6ae480ab1a66f7f4fd4e60e [ 1609.005261] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.270s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1609.005716] env[62740]: DEBUG nova.compute.manager [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Start building networks asynchronously for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1609.007424] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 6ecf5c34be53407c8ec28ff0399d0153 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1609.034107] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6ecf5c34be53407c8ec28ff0399d0153 [ 1609.035609] env[62740]: DEBUG nova.compute.utils [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1609.036197] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg d25fd522108e4b5fa963ee6df51384f8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1609.037236] env[62740]: DEBUG nova.compute.manager [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1609.037400] env[62740]: DEBUG nova.network.neutron [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1609.045332] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d25fd522108e4b5fa963ee6df51384f8 [ 1609.045818] env[62740]: DEBUG nova.compute.manager [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1609.047427] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg ca471e4a6d874c0790290765c71b570b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1609.076009] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ca471e4a6d874c0790290765c71b570b [ 1609.078680] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 0e59b278b5dc49f3b97bef5d75e8d359 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1609.110950] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0e59b278b5dc49f3b97bef5d75e8d359 [ 1609.112130] env[62740]: DEBUG nova.compute.manager [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Start spawning the instance on the hypervisor. {{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1609.138205] env[62740]: DEBUG nova.virt.hardware [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1609.138476] env[62740]: DEBUG nova.virt.hardware [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1609.138665] env[62740]: DEBUG nova.virt.hardware [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1609.138870] env[62740]: DEBUG nova.virt.hardware [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1609.139035] env[62740]: DEBUG nova.virt.hardware [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 
tempest-ServerDiskConfigTestJSON-1958840202-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1609.139189] env[62740]: DEBUG nova.virt.hardware [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1609.139398] env[62740]: DEBUG nova.virt.hardware [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1609.139577] env[62740]: DEBUG nova.virt.hardware [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1609.139771] env[62740]: DEBUG nova.virt.hardware [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1609.139937] env[62740]: DEBUG nova.virt.hardware [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1609.140134] env[62740]: DEBUG nova.virt.hardware [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1609.141145] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c9fc067-0472-443b-8a2c-034e767ba7de {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.145058] env[62740]: DEBUG nova.policy [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '96885eac4aea4c049695f190c31b8b0e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c5b3436283d24d41ae0e599a35d1850c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 1609.152237] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4756d32e-24b8-40d0-a4cd-12c79c134bbf {{(pid=62740) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.520191] env[62740]: DEBUG nova.network.neutron [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Successfully created port: 91e7d296-c950-4537-9447-6e3c2fbe9134 {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1610.183354] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 7abd7887d92d4e3eb9dd651ae02afb5a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1610.192872] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7abd7887d92d4e3eb9dd651ae02afb5a [ 1610.194919] env[62740]: DEBUG nova.network.neutron [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Successfully updated port: 91e7d296-c950-4537-9447-6e3c2fbe9134 {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1610.195379] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg cbe2e8a817a941378047abc85830ad45 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1610.203972] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cbe2e8a817a941378047abc85830ad45 [ 1610.204543] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquiring lock "refresh_cache-3aa2858e-d422-408a-a83a-98382f971add" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1610.204679] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquired lock "refresh_cache-3aa2858e-d422-408a-a83a-98382f971add" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1610.204824] env[62740]: DEBUG nova.network.neutron [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1610.205201] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 8958feb849754a76b528e4c319e1c6c4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1610.211741] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8958feb849754a76b528e4c319e1c6c4 [ 1610.257753] env[62740]: DEBUG nova.network.neutron [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1610.453825] env[62740]: DEBUG nova.network.neutron [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Updating instance_info_cache with network_info: [{"id": "91e7d296-c950-4537-9447-6e3c2fbe9134", "address": "fa:16:3e:8b:41:d8", "network": {"id": "2df153a5-47a3-46f8-96b7-aa7f4f657f55", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1427651514-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c5b3436283d24d41ae0e599a35d1850c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91e7d296-c9", "ovs_interfaceid": "91e7d296-c950-4537-9447-6e3c2fbe9134", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1610.454382] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 29996e40f3bc4e70979b4785d5a03301 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1610.465836] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 29996e40f3bc4e70979b4785d5a03301 [ 1610.466511] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Releasing lock "refresh_cache-3aa2858e-d422-408a-a83a-98382f971add" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1610.466801] env[62740]: DEBUG nova.compute.manager [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Instance network_info: |[{"id": "91e7d296-c950-4537-9447-6e3c2fbe9134", "address": "fa:16:3e:8b:41:d8", "network": {"id": "2df153a5-47a3-46f8-96b7-aa7f4f657f55", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1427651514-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c5b3436283d24d41ae0e599a35d1850c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91e7d296-c9", "ovs_interfaceid": "91e7d296-c950-4537-9447-6e3c2fbe9134", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1610.467214] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:41:d8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4a2b284a-a29c-478f-b763-c9b5821e20ec', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '91e7d296-c950-4537-9447-6e3c2fbe9134', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1610.474924] env[62740]: DEBUG oslo.service.loopingcall [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1610.475419] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1610.475662] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-df657854-e448-4098-8ab3-11177cc93bdb {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.495956] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1610.495956] env[62740]: value = "task-640306" [ 1610.495956] env[62740]: _type = "Task" [ 1610.495956] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.505948] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640306, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.575146] env[62740]: DEBUG nova.compute.manager [req-6ea557f3-99f9-415f-a997-341005f09d41 req-f75d7bf4-ceee-4335-aeef-51f0e1ccc9e0 service nova] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Received event network-vif-plugged-91e7d296-c950-4537-9447-6e3c2fbe9134 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1610.575535] env[62740]: DEBUG oslo_concurrency.lockutils [req-6ea557f3-99f9-415f-a997-341005f09d41 req-f75d7bf4-ceee-4335-aeef-51f0e1ccc9e0 service nova] Acquiring lock "3aa2858e-d422-408a-a83a-98382f971add-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1610.575773] env[62740]: DEBUG oslo_concurrency.lockutils [req-6ea557f3-99f9-415f-a997-341005f09d41 req-f75d7bf4-ceee-4335-aeef-51f0e1ccc9e0 service nova] Lock "3aa2858e-d422-408a-a83a-98382f971add-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1610.575951] env[62740]: DEBUG oslo_concurrency.lockutils [req-6ea557f3-99f9-415f-a997-341005f09d41 req-f75d7bf4-ceee-4335-aeef-51f0e1ccc9e0 service nova] Lock "3aa2858e-d422-408a-a83a-98382f971add-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1610.576139] env[62740]: DEBUG nova.compute.manager [req-6ea557f3-99f9-415f-a997-341005f09d41 req-f75d7bf4-ceee-4335-aeef-51f0e1ccc9e0 service nova] [instance: 3aa2858e-d422-408a-a83a-98382f971add] No waiting events found dispatching network-vif-plugged-91e7d296-c950-4537-9447-6e3c2fbe9134 {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1610.576309] env[62740]: WARNING nova.compute.manager [req-6ea557f3-99f9-415f-a997-341005f09d41 req-f75d7bf4-ceee-4335-aeef-51f0e1ccc9e0 service nova] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Received unexpected event network-vif-plugged-91e7d296-c950-4537-9447-6e3c2fbe9134 for instance with vm_state building and task_state spawning. [ 1610.576478] env[62740]: DEBUG nova.compute.manager [req-6ea557f3-99f9-415f-a997-341005f09d41 req-f75d7bf4-ceee-4335-aeef-51f0e1ccc9e0 service nova] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Received event network-changed-91e7d296-c950-4537-9447-6e3c2fbe9134 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1610.576630] env[62740]: DEBUG nova.compute.manager [req-6ea557f3-99f9-415f-a997-341005f09d41 req-f75d7bf4-ceee-4335-aeef-51f0e1ccc9e0 service nova] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Refreshing instance network info cache due to event network-changed-91e7d296-c950-4537-9447-6e3c2fbe9134. 
{{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1610.576815] env[62740]: DEBUG oslo_concurrency.lockutils [req-6ea557f3-99f9-415f-a997-341005f09d41 req-f75d7bf4-ceee-4335-aeef-51f0e1ccc9e0 service nova] Acquiring lock "refresh_cache-3aa2858e-d422-408a-a83a-98382f971add" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1610.576950] env[62740]: DEBUG oslo_concurrency.lockutils [req-6ea557f3-99f9-415f-a997-341005f09d41 req-f75d7bf4-ceee-4335-aeef-51f0e1ccc9e0 service nova] Acquired lock "refresh_cache-3aa2858e-d422-408a-a83a-98382f971add" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1610.577140] env[62740]: DEBUG nova.network.neutron [req-6ea557f3-99f9-415f-a997-341005f09d41 req-f75d7bf4-ceee-4335-aeef-51f0e1ccc9e0 service nova] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Refreshing network info cache for port 91e7d296-c950-4537-9447-6e3c2fbe9134 {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1610.577632] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-6ea557f3-99f9-415f-a997-341005f09d41 req-f75d7bf4-ceee-4335-aeef-51f0e1ccc9e0 service nova] Expecting reply to msg ac85653799cb48c78f8bf1cd90a82c1d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1610.585104] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac85653799cb48c78f8bf1cd90a82c1d [ 1611.007450] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640306, 'name': CreateVM_Task, 'duration_secs': 0.285649} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.008096] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1611.008195] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1611.008361] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1611.008699] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1611.008947] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-172dd5f3-79ea-4535-bfa4-f34d8c4ba30f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.013205] env[62740]: DEBUG oslo_vmware.api [None 
req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Waiting for the task: (returnval){ [ 1611.013205] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52e95297-f4b1-fdb7-f4b4-ab3b97573aa8" [ 1611.013205] env[62740]: _type = "Task" [ 1611.013205] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.020353] env[62740]: DEBUG oslo_vmware.api [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52e95297-f4b1-fdb7-f4b4-ab3b97573aa8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.145459] env[62740]: DEBUG nova.network.neutron [req-6ea557f3-99f9-415f-a997-341005f09d41 req-f75d7bf4-ceee-4335-aeef-51f0e1ccc9e0 service nova] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Updated VIF entry in instance network info cache for port 91e7d296-c950-4537-9447-6e3c2fbe9134. {{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1611.145631] env[62740]: DEBUG nova.network.neutron [req-6ea557f3-99f9-415f-a997-341005f09d41 req-f75d7bf4-ceee-4335-aeef-51f0e1ccc9e0 service nova] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Updating instance_info_cache with network_info: [{"id": "91e7d296-c950-4537-9447-6e3c2fbe9134", "address": "fa:16:3e:8b:41:d8", "network": {"id": "2df153a5-47a3-46f8-96b7-aa7f4f657f55", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1427651514-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c5b3436283d24d41ae0e599a35d1850c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91e7d296-c9", "ovs_interfaceid": "91e7d296-c950-4537-9447-6e3c2fbe9134", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1611.146236] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-6ea557f3-99f9-415f-a997-341005f09d41 req-f75d7bf4-ceee-4335-aeef-51f0e1ccc9e0 service nova] Expecting reply to msg 1b14f5265f064ca3b17062595c48628f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1611.154818] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1b14f5265f064ca3b17062595c48628f [ 1611.155703] env[62740]: DEBUG oslo_concurrency.lockutils [req-6ea557f3-99f9-415f-a997-341005f09d41 req-f75d7bf4-ceee-4335-aeef-51f0e1ccc9e0 service nova] Releasing lock "refresh_cache-3aa2858e-d422-408a-a83a-98382f971add" {{(pid=62740) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1611.525032] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1611.525321] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1611.525509] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1616.890983] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1616.891478] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1617.155287] env[62740]: DEBUG oslo_concurrency.lockutils [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Acquiring lock "9a595940-16ba-401a-922f-331cf87093c9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1617.155513] env[62740]: DEBUG oslo_concurrency.lockutils [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Lock "9a595940-16ba-401a-922f-331cf87093c9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1617.891371] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1617.891659] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Starting heal instance info cache {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 1617.891659] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Rebuilding the list of instances to heal 
{{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 1617.892759] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 90b33e91f00d4204b07be8eb5bcfd9a4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1617.911382] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 90b33e91f00d4204b07be8eb5bcfd9a4 [ 1617.913617] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1617.913945] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1617.914163] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1617.914249] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1617.914359] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1617.914513] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1617.914594] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1617.914698] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1617.914828] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1617.914990] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Skipping network cache update for instance because it is Building. 
[ 1617.915087] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Didn't find any instances for network info cache update. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}}
[ 1617.915584] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1618.200885] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Acquiring lock "d2fb90b7-1618-4f07-8854-81566887a7cd" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1618.201137] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Lock "d2fb90b7-1618-4f07-8854-81566887a7cd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1618.891028] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1618.891028] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1618.891028] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62740) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}}
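_reclaim_queued_deletes illustrates a common shape for these periodic tasks: the task is always on the schedule, but its body gates itself on configuration at run time, so with reclaim_instance_interval unset (<= 0) it returns immediately. A minimal sketch with oslo.service, assuming a conf object that exposes a reclaim_instance_interval option (the task body is a placeholder):

    from oslo_service import periodic_task

    class Manager(periodic_task.PeriodicTasks):
        def __init__(self, conf):
            super().__init__(conf)

        @periodic_task.periodic_task(spacing=60)
        def _reclaim_queued_deletes(self, context):
            if self.conf.reclaim_instance_interval <= 0:
                # Logged above as "CONF.reclaim_instance_interval <= 0, skipping..."
                return
            # ... purge instances soft-deleted for longer than the interval ...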
[ 1619.891368] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager.update_available_resource {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1619.891851] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg f7f7604cccc44f669adfa36ca03ec206 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1619.901997] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f7f7604cccc44f669adfa36ca03ec206
[ 1619.902960] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1619.903178] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1619.903342] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1619.903495] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62740) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1619.904565] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14d0031a-24ca-463b-bac8-b01436320814 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1619.913418] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03f01d44-0320-4f4b-8d4f-fba99515e739 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1619.926795] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74c5e89e-1d03-481d-b698-b0b8e745f9df {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1619.932792] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de754732-14cc-44b8-9b8e-5fe2d0efbd5c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1619.962016] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181692MB free_disk=90GB free_vcpus=48 pci_devices=None {{(pid=62740) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1619.962174] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1619.962368] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1619.963187] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 7e6b09483f92464a97c7ffca4f15f7e1 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1620.018915] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7e6b09483f92464a97c7ffca4f15f7e1
[ 1620.023123] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 0923197562544aea889be0d3d3db0f02 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1620.034859] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0923197562544aea889be0d3d3db0f02
[ 1620.055213] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 2deff09f-d24f-4609-91f2-1585e8407c2a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1620.055374] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 3f36f081-2851-4339-860d-0a302ef4ee2c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1620.055506] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 913ddb91-9d46-459e-8775-c9f380ed3cc4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1620.055635] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 07efd13e-40d0-4158-b17c-6f5c75474ce3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1620.055760] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 388d71f2-b229-4666-a53d-d5b07e498eed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1620.055879] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance c0daf074-eecb-4899-938f-477031efc6d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1620.055999] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 61fea037-aac3-47ef-aa6a-5dfa657d840d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1620.056170] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 86c57375-8328-4344-b228-2f1ce6efc71e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1620.056301] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance a41506d2-33b2-40b8-badb-41312c7abbd2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1620.056419] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 3aa2858e-d422-408a-a83a-98382f971add actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1620.056950] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 14300cd3cde8470bb2996894f92c0009 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1620.067749] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 14300cd3cde8470bb2996894f92c0009
[ 1620.068473] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1620.069024] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 64f24fd7feb2429c83371ef3fff60640 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1620.080026] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 64f24fd7feb2429c83371ef3fff60640
[ 1620.080673] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1620.081148] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg ff3629708e814e0486d87325bc7e6220 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1620.090327] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ff3629708e814e0486d87325bc7e6220
[ 1620.090840] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 9a595940-16ba-401a-922f-331cf87093c9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1620.091309] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 7e0b3c4efa7e4988bb2be28cdb7b5025 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1620.101689] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7e0b3c4efa7e4988bb2be28cdb7b5025
[ 1620.102388] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance d2fb90b7-1618-4f07-8854-81566887a7cd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1620.102617] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1620.102768] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1620.267946] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5504ada4-373d-42e3-9d2e-e4917e1ca03d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1620.275621] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f11d2a0c-6524-47a4-a317-339ed4b3b28f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1620.305551] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d183d46f-1eae-40e5-9262-579a8f118893 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1620.312453] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-646a5a57-7951-428c-85dc-2663e94f92cd {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1620.324984] env[62740]: DEBUG nova.compute.provider_tree [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1620.325455] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg aaba2e7b8a064b4dbd43ae8171b006cc in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1620.332911] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aaba2e7b8a064b4dbd43ae8171b006cc
[ 1620.333806] env[62740]: DEBUG nova.scheduler.client.report [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1620.336034] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 9dcc08b4bd3f4ce19566c9403de00f62 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1620.348247] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9dcc08b4bd3f4ce19566c9403de00f62
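The inventory payload reported for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 is what placement uses to answer the scheduler: for each resource class, schedulable capacity is (total - reserved) * allocation_ratio, which is why 48 physical vCPUs comfortably back the 10 currently allocated ones. A worked example over the exact numbers logged above (the helper itself is illustrative):

    # Effective capacity implied by the inventory record above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def effective_capacity(inv):
        # Mirrors how placement applies allocation ratios to raw inventory.
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    print(effective_capacity(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}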
[ 1620.348988] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62740) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1620.349193] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.387s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1623.349605] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1623.349907] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1625.886659] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1625.887307] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 02396f045c234e86a3d03f506a821ae1 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1625.904974] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 02396f045c234e86a3d03f506a821ae1
[ 1626.086069] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-b831c5d2-2702-484d-898c-322fb0333791 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 18d538128bb04c3c9b51fe24f8d86cf8 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1626.098524] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 18d538128bb04c3c9b51fe24f8d86cf8
[ 1626.099075] env[62740]: DEBUG oslo_concurrency.lockutils [None req-b831c5d2-2702-484d-898c-322fb0333791 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquiring lock "3aa2858e-d422-408a-a83a-98382f971add" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1639.397297] env[62740]: DEBUG oslo_concurrency.lockutils [None req-005425bd-3467-464a-8f5d-3242349a1bc5 tempest-ServersAaction247Test-424946737 tempest-ServersAaction247Test-424946737-project-member] Acquiring lock "08197ee6-55de-40f8-8704-641c0614cad6" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1639.397647] env[62740]: DEBUG oslo_concurrency.lockutils [None req-005425bd-3467-464a-8f5d-3242349a1bc5 tempest-ServersAaction247Test-424946737 tempest-ServersAaction247Test-424946737-project-member] Lock "08197ee6-55de-40f8-8704-641c0614cad6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1657.445430] env[62740]: WARNING oslo_vmware.rw_handles [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 1657.445430] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 1657.445430] env[62740]: ERROR oslo_vmware.rw_handles   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 1657.445430] env[62740]: ERROR oslo_vmware.rw_handles     self._conn.getresponse()
[ 1657.445430] env[62740]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 1657.445430] env[62740]: ERROR oslo_vmware.rw_handles     response.begin()
[ 1657.445430] env[62740]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 1657.445430] env[62740]: ERROR oslo_vmware.rw_handles     version, status, reason = self._read_status()
[ 1657.445430] env[62740]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 1657.445430] env[62740]: ERROR oslo_vmware.rw_handles     raise RemoteDisconnected("Remote end closed connection without"
[ 1657.445430] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 1657.445430] env[62740]: ERROR oslo_vmware.rw_handles
[ 1657.445989] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/cf17bf4c-bec1-49ed-ab4d-70b85ddeeabf/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 1657.447912] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 1657.448180] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Copying Virtual Disk [datastore2] vmware_temp/cf17bf4c-bec1-49ed-ab4d-70b85ddeeabf/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore2] vmware_temp/cf17bf4c-bec1-49ed-ab4d-70b85ddeeabf/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 1657.448459] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1d1c64ef-26f0-4f4b-9690-e0bc78a458b4 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1657.457585] env[62740]: DEBUG oslo_vmware.api [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Waiting for the task: (returnval){
[ 1657.457585] env[62740]: value = "task-640307"
[ 1657.457585] env[62740]: _type = "Task"
[ 1657.457585] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1657.466025] env[62740]: DEBUG oslo_vmware.api [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Task: {'id': task-640307, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1657.967331] env[62740]: DEBUG oslo_vmware.exceptions [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Fault InvalidArgument not matched. {{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 1657.967614] env[62740]: DEBUG oslo_concurrency.lockutils [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1657.968179] env[62740]: ERROR nova.compute.manager [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1657.968179] env[62740]: Faults: ['InvalidArgument']
[ 1657.968179] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Traceback (most recent call last):
[ 1657.968179] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a]   File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 1657.968179] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a]     yield resources
[ 1657.968179] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a]   File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1657.968179] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a]     self.driver.spawn(context, instance, image_meta,
[ 1657.968179] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1657.968179] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1657.968179] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1657.968179] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a]     self._fetch_image_if_missing(context, vi)
[ 1657.968179] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1657.968179] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a]     image_cache(vi, tmp_image_ds_loc)
[ 1657.968179] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1657.968179] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a]     vm_util.copy_virtual_disk(
[ 1657.968179] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1657.968179] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a]     session._wait_for_task(vmdk_copy_task)
[ 1657.968179] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1657.968179] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a]     return self.wait_for_task(task_ref)
[ 1657.968179] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1657.968179] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a]     return evt.wait()
[ 1657.968179] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1657.968179] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a]     result = hub.switch()
[ 1657.968179] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1657.968179] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a]     return self.greenlet.switch()
[ 1657.968179] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1657.968179] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a]     self.f(*self.args, **self.kw)
[ 1657.968179] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1657.968179] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a]     raise exceptions.translate_fault(task_info.error)
[ 1657.968179] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1657.968179] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Faults: ['InvalidArgument']
[ 1657.968179] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a]
[ 1657.969157] env[62740]: INFO nova.compute.manager [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Terminating instance
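The traceback above spells out the wait path for vCenter tasks: wait_for_task() parks the greenthread on an event while a looping call polls the task state and, when the task ends in error, raises the fault translated into a Python exception (here VimFaultException for InvalidArgument). A generic sketch of that poll-until-done shape; get_task_info and TaskFault are stand-ins for the vSphere API, not oslo.vmware's real interface:

    import time

    class TaskFault(Exception):
        pass

    def wait_for_task(get_task_info, task_ref, interval=0.5):
        while True:
            info = get_task_info(task_ref)   # e.g. one property-collector round trip
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                # Mirrors raise exceptions.translate_fault(task_info.error)
                raise TaskFault(info['error'])
            time.sleep(interval)             # 'queued'/'running': poll again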
[ 1657.970099] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1657.970310] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1657.970543] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-41ca3f48-2ab5-46a7-82f6-ac0ed324a20f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1657.972682] env[62740]: DEBUG nova.compute.manager [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 1657.972879] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1657.973596] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-475b198e-0777-4e0b-801b-74021586a687 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1657.981230] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 1657.981472] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-50ae6662-b7c3-46e2-bcbc-43e61e4d89c6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1657.983583] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1657.983758] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 1657.984685] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99d009da-3f5f-4d1c-bfa1-9805eaaac939 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1657.989185] env[62740]: DEBUG oslo_vmware.api [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Waiting for the task: (returnval){
[ 1657.989185] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52b60c28-8f54-6459-9e75-bfe320ebee6c"
[ 1657.989185] env[62740]: _type = "Task"
[ 1657.989185] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1657.996240] env[62740]: DEBUG oslo_vmware.api [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52b60c28-8f54-6459-9e75-bfe320ebee6c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1658.048741] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 1658.048984] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 1658.049194] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Deleting the datastore file [datastore2] 2deff09f-d24f-4609-91f2-1585e8407c2a {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1658.049451] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f54892df-7721-45c3-ae46-aa396815d945 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1658.056213] env[62740]: DEBUG oslo_vmware.api [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Waiting for the task: (returnval){
[ 1658.056213] env[62740]: value = "task-640309"
[ 1658.056213] env[62740]: _type = "Task"
[ 1658.056213] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1658.063627] env[62740]: DEBUG oslo_vmware.api [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Task: {'id': task-640309, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1658.499737] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 1658.500041] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Creating directory with path [datastore2] vmware_temp/7bff0742-b76b-426d-acbe-695819bafbc4/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1658.500246] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c376103c-0b7c-418a-84b1-feb8f95aa20c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1658.512189] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Created directory with path [datastore2] vmware_temp/7bff0742-b76b-426d-acbe-695819bafbc4/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1658.512339] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Fetch image to [datastore2] vmware_temp/7bff0742-b76b-426d-acbe-695819bafbc4/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 1658.512510] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/7bff0742-b76b-426d-acbe-695819bafbc4/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 1658.513290] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6637be28-cea2-49e6-878a-e08c887c2609 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1658.519891] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e53b53ce-84b5-4c6a-b966-5c591e8659de {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1658.528650] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaccb0e8-c7b6-4ac9-9845-c4127d6be67a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1658.562043] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da8943f4-7600-47b5-bb38-d67cdf3a2da4 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1658.569891] env[62740]: DEBUG oslo_vmware.api [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Task: {'id': task-640309, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074952} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1658.571447] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1658.571646] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 1658.571823] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1658.572007] env[62740]: INFO nova.compute.manager [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Took 0.60 seconds to destroy the instance on the hypervisor.
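The destroy path interleaved above follows a fixed order: unregister the VM so vCenter drops its handle on the files, then delete the instance's datastore directory as a separate task (task-640309, which completed in about 0.075s). A compressed sketch of that ordering; the session object and its methods here are hypothetical placeholders, not oslo.vmware's API:

    # Teardown ordering from the log: unregister first, then delete the files.
    def destroy_instance(session, vm_ref, datastore, instance_uuid):
        session.invoke('UnregisterVM', vm_ref)                   # "Unregistered the VM"
        path = '[%s] %s' % (datastore, instance_uuid)
        task = session.invoke('DeleteDatastoreFile_Task', path)  # "Deleting the datastore file"
        session.wait_for_task(task)                              # poll until completed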
[ 1658.573798] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8aef7116-61e8-4f18-9325-f2d872472efe {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1658.575759] env[62740]: DEBUG nova.compute.claims [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 1658.575936] env[62740]: DEBUG oslo_concurrency.lockutils [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1658.576158] env[62740]: DEBUG oslo_concurrency.lockutils [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1658.578162] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg b467dbb8f36b42a5b811204019dcec9b in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1658.599485] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 1658.621028] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b467dbb8f36b42a5b811204019dcec9b
[ 1658.652357] env[62740]: DEBUG oslo_vmware.rw_handles [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7bff0742-b76b-426d-acbe-695819bafbc4/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 1658.713333] env[62740]: DEBUG oslo_vmware.rw_handles [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Completed reading data from the image iterator. {{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
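The rw_handles records describe the upload side of the image fetch: an HTTPS connection is opened against the datastore folder URL with the exact file size (21318656 bytes here), the image iterator is streamed into it chunk by chunk, and the response is read on close, which is the step that blew up with RemoteDisconnected earlier in this log. A bare-bones sketch using only the standard library (not oslo.vmware's rw_handles implementation):

    import http.client

    def upload_image(host, path, image_iter, size):
        conn = http.client.HTTPSConnection(host)
        conn.putrequest('PUT', path)
        conn.putheader('Content-Length', str(size))  # size known up front, as logged
        conn.endheaders()
        for chunk in image_iter:   # "Completed reading data from the image iterator."
            conn.send(chunk)
        resp = conn.getresponse()  # the close() step that raised RemoteDisconnected above
        conn.close()
        return resp.status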
{{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1658.713556] env[62740]: DEBUG oslo_vmware.rw_handles [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7bff0742-b76b-426d-acbe-695819bafbc4/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1658.851664] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9c2d56d-5d73-4240-8912-6b4e3ab0a2a9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.860506] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22469aea-c319-4e0c-a30d-a313aebc392b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.889012] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a1f3e95-046e-479c-a8ce-fe87f3d9dbeb {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.895471] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fe3e094-c7c9-42ac-91eb-95b2cdb2d596 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.907734] env[62740]: DEBUG nova.compute.provider_tree [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1658.908250] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg e3c6389dd1194beb829ac80e44d03c3d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1658.915325] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e3c6389dd1194beb829ac80e44d03c3d [ 1658.916223] env[62740]: DEBUG nova.scheduler.client.report [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1658.918617] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply 
to msg 5a5bf67a65a744028623d137ede7064e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1658.930057] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5a5bf67a65a744028623d137ede7064e [ 1658.930793] env[62740]: DEBUG oslo_concurrency.lockutils [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.355s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1658.931330] env[62740]: ERROR nova.compute.manager [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1658.931330] env[62740]: Faults: ['InvalidArgument'] [ 1658.931330] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Traceback (most recent call last): [ 1658.931330] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1658.931330] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] self.driver.spawn(context, instance, image_meta, [ 1658.931330] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1658.931330] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1658.931330] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1658.931330] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] self._fetch_image_if_missing(context, vi) [ 1658.931330] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1658.931330] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] image_cache(vi, tmp_image_ds_loc) [ 1658.931330] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1658.931330] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] vm_util.copy_virtual_disk( [ 1658.931330] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1658.931330] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] session._wait_for_task(vmdk_copy_task) [ 1658.931330] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1658.931330] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] return self.wait_for_task(task_ref) [ 
1658.931330] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1658.931330] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] return evt.wait() [ 1658.931330] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1658.931330] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] result = hub.switch() [ 1658.931330] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1658.931330] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] return self.greenlet.switch() [ 1658.931330] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1658.931330] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] self.f(*self.args, **self.kw) [ 1658.931330] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1658.931330] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] raise exceptions.translate_fault(task_info.error) [ 1658.931330] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1658.931330] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Faults: ['InvalidArgument'] [ 1658.931330] env[62740]: ERROR nova.compute.manager [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] [ 1658.932158] env[62740]: DEBUG nova.compute.utils [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1658.933807] env[62740]: DEBUG nova.compute.manager [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Build of instance 2deff09f-d24f-4609-91f2-1585e8407c2a was re-scheduled: A specified parameter was not correct: fileType [ 1658.933807] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1658.934217] env[62740]: DEBUG nova.compute.manager [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1658.934396] env[62740]: DEBUG nova.compute.manager [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 
tempest-AttachVolumeNegativeTest-1769210663-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1658.934570] env[62740]: DEBUG nova.compute.manager [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1658.934732] env[62740]: DEBUG nova.network.neutron [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1659.358526] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 0070cedc44ce48be904574c57063b001 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1659.368867] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0070cedc44ce48be904574c57063b001 [ 1659.369504] env[62740]: DEBUG nova.network.neutron [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1659.370082] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 9afa7a9978d34dfbb9c535aa1377b5ad in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1659.382843] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9afa7a9978d34dfbb9c535aa1377b5ad [ 1659.383585] env[62740]: INFO nova.compute.manager [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Took 0.45 seconds to deallocate network for instance.
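The VimFaultException in the traceback above is how oslo.vmware surfaces raw VIM faults to callers: the fault names land in fault_list (here ['InvalidArgument']) while the server-side detail ("A specified parameter was not correct: fileType") goes into the message. A minimal sketch of catching it around a task wait; the session and task objects stand for an oslo_vmware.api.VMwareAPISession and a CopyVirtualDisk_Task reference already in hand:

    from oslo_vmware import exceptions as vexc

    def copy_disk_checked(session, vmdk_copy_task):
        # session: oslo_vmware.api.VMwareAPISession;
        # vmdk_copy_task: task ref returned by CopyVirtualDisk_Task.
        try:
            return session.wait_for_task(vmdk_copy_task)
        except vexc.VimFaultException as e:
            # fault_list holds the raw VIM fault names, e.g. ['InvalidArgument'].
            if 'InvalidArgument' in e.fault_list:
                pass  # e.g. retry with a corrected disk spec (caller's choice)
            raise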
[ 1659.385425] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg d99ae29c064f48a08696b5bcfd22df9d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1659.420488] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d99ae29c064f48a08696b5bcfd22df9d [ 1659.425288] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 2da6cbd884834eb6a0e4e1b9aa737cef in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1659.467206] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2da6cbd884834eb6a0e4e1b9aa737cef [ 1659.493701] env[62740]: INFO nova.scheduler.client.report [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Deleted allocations for instance 2deff09f-d24f-4609-91f2-1585e8407c2a [ 1659.500920] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 083441b5b8d04c5ab3011dac16243f88 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1659.516280] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 083441b5b8d04c5ab3011dac16243f88 [ 1659.517010] env[62740]: DEBUG oslo_concurrency.lockutils [None req-49356bb3-b923-4ccf-a993-fc0bbc812d82 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Lock "2deff09f-d24f-4609-91f2-1585e8407c2a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 484.415s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1659.517586] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg a497e330108848c28eeaebc97591aadd in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1659.518670] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dad7a0bd-a740-4f77-887e-2ffba67ff5d3 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Lock "2deff09f-d24f-4609-91f2-1585e8407c2a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 288.601s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1659.518904] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dad7a0bd-a740-4f77-887e-2ffba67ff5d3 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Acquiring lock "2deff09f-d24f-4609-91f2-1585e8407c2a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1659.519129] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dad7a0bd-a740-4f77-887e-2ffba67ff5d3 tempest-AttachVolumeNegativeTest-1769210663 
tempest-AttachVolumeNegativeTest-1769210663-project-member] Lock "2deff09f-d24f-4609-91f2-1585e8407c2a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1659.519301] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dad7a0bd-a740-4f77-887e-2ffba67ff5d3 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Lock "2deff09f-d24f-4609-91f2-1585e8407c2a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1659.521570] env[62740]: INFO nova.compute.manager [None req-dad7a0bd-a740-4f77-887e-2ffba67ff5d3 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Terminating instance [ 1659.523609] env[62740]: DEBUG nova.compute.manager [None req-dad7a0bd-a740-4f77-887e-2ffba67ff5d3 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1659.523836] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-dad7a0bd-a740-4f77-887e-2ffba67ff5d3 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1659.524389] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f1890ca6-1449-4063-b5f5-b2c340165d66 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.532964] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a497e330108848c28eeaebc97591aadd [ 1659.536444] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec307a9a-214f-40dc-90f6-d19b68da1554 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.548729] env[62740]: DEBUG nova.compute.manager [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
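All of the Acquiring/acquired/"released" lock bookkeeping above, with its waited/held timings, is produced by oslo.concurrency. A minimal sketch of the same pattern; the lock names and the function are illustrative, not Nova's code:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_resource_tracker(instance_uuid):
        # Only one thread per process runs this at a time; wait and hold
        # durations are logged like the lockutils entries above.
        print('accounting for %s' % instance_uuid)

    # The same primitive is also available as a context manager:
    with lockutils.lock('2deff09f-d24f-4609-91f2-1585e8407c2a-events'):
        pass  # e.g. clear per-instance events under the instance's event lock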
[ 1659.550680] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg e2ee59f836354c6aae3e85ea89555dac in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1659.574236] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-dad7a0bd-a740-4f77-887e-2ffba67ff5d3 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2deff09f-d24f-4609-91f2-1585e8407c2a could not be found. [ 1659.574236] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-dad7a0bd-a740-4f77-887e-2ffba67ff5d3 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1659.574236] env[62740]: INFO nova.compute.manager [None req-dad7a0bd-a740-4f77-887e-2ffba67ff5d3 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1659.574236] env[62740]: DEBUG oslo.service.loopingcall [None req-dad7a0bd-a740-4f77-887e-2ffba67ff5d3 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return.
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1659.575234] env[62740]: DEBUG nova.compute.manager [-] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1659.575391] env[62740]: DEBUG nova.network.neutron [-] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1659.594385] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2ee59f836354c6aae3e85ea89555dac [ 1659.612903] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1659.613204] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1659.614800] env[62740]: INFO nova.compute.claims [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1659.616761] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 0e5741cf0c9047ffaaf3a2ef78fc756f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1659.663975] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0e5741cf0c9047ffaaf3a2ef78fc756f [ 1659.665791] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 5ab6bf7032c742bb9c1598fe269933a8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1659.676163] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ab6bf7032c742bb9c1598fe269933a8 [ 1659.694214] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 295ea1966f0048879e77830278c82aae in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1659.702202] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 295ea1966f0048879e77830278c82aae [ 1659.702405] env[62740]: DEBUG nova.network.neutron [-] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1659.702811] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg aad38c91f4f34385bf386c7aa7414ff4 in queue reply_30cb6e3d754a4ebf9cedab7950709402
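The oslo.service looping-call entry above ("Waiting for function ... _deallocate_network_with_retries to return.") is the standard pattern of polling a function until it signals completion by raising LoopingCallDone. A generic, self-contained illustration; the interval and retry count are made up, and Nova's real variant retries the deallocation with a backoff:

    from oslo_service import loopingcall

    attempts = {'count': 0}

    def _deallocate_with_retries():
        attempts['count'] += 1
        if attempts['count'] >= 3:  # pretend the third try succeeds
            raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    # start() returns an event; wait() blocks until LoopingCallDone is raised.
    result = timer.start(interval=0.1).wait()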
[ 1659.713834] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aad38c91f4f34385bf386c7aa7414ff4 [ 1659.714345] env[62740]: INFO nova.compute.manager [-] [instance: 2deff09f-d24f-4609-91f2-1585e8407c2a] Took 0.14 seconds to deallocate network for instance. [ 1659.718508] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dad7a0bd-a740-4f77-887e-2ffba67ff5d3 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 24c2f91583cd4e209bdc5250e1002c2a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1659.755382] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 24c2f91583cd4e209bdc5250e1002c2a [ 1659.771429] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dad7a0bd-a740-4f77-887e-2ffba67ff5d3 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 974cf6c9b83e475985ff22aba324ec50 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1659.824587] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 974cf6c9b83e475985ff22aba324ec50 [ 1659.827595] env[62740]: DEBUG oslo_concurrency.lockutils [None req-dad7a0bd-a740-4f77-887e-2ffba67ff5d3 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Lock "2deff09f-d24f-4609-91f2-1585e8407c2a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.309s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1659.827928] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-dad7a0bd-a740-4f77-887e-2ffba67ff5d3 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 19dcc11d7de043fb95781bb3c0429939 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1659.841528] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 19dcc11d7de043fb95781bb3c0429939 [ 1659.880178] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bce0a12-d7e8-4b0e-a2d7-dd6cb0ff098a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.888852] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3561742f-902a-4d02-a62f-42f670160bbe {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.921550] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a12acfe7-56e6-4f4f-af84-59cac6a53f38 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.929691] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d2432f4-808f-4f3b-ada9-87aaa77231f7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.943106] env[62740]: DEBUG nova.compute.provider_tree [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1659.943581] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 17434326cd3a41879e6f26881e87e228 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1659.953225] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 17434326cd3a41879e6f26881e87e228 [ 1659.953225] env[62740]: DEBUG nova.scheduler.client.report [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1659.954396] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 4239a3451972468b950fa59ac4a65cac in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1659.965544] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4239a3451972468b950fa59ac4a65cac [ 1659.966347] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.353s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1659.966837] env[62740]: DEBUG nova.compute.manager [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Start building networks asynchronously for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}}
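For the inventory reported a few entries above, placement derives usable capacity per resource class as (total - reserved) * allocation_ratio; min_unit, max_unit and step_size only constrain the size of individual allocations. A quick check of the logged numbers:

    # Values copied from the scheduler report entry above.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        print(rc, capacity)  # VCPU 192, MEMORY_MB 196078, DISK_GB 400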
[ 1659.968718] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg a813395ef37342b58a2c60b902a30343 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1660.002604] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a813395ef37342b58a2c60b902a30343 [ 1660.004012] env[62740]: DEBUG nova.compute.utils [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1660.004628] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 7fa0dc7195ba411d89dd1af29d60783e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1660.005613] env[62740]: DEBUG nova.compute.manager [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1660.005787] env[62740]: DEBUG nova.network.neutron [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1660.014359] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7fa0dc7195ba411d89dd1af29d60783e [ 1660.015033] env[62740]: DEBUG nova.compute.manager [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Start building block device mappings for instance.
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1660.016914] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg ede5e7de42624e508ee6a16b6f57ad6f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1660.050818] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ede5e7de42624e508ee6a16b6f57ad6f [ 1660.053816] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg e221c164cd5c42c88d3a37ea5bfb74df in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1660.065421] env[62740]: DEBUG nova.policy [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aee525677ce346b59af803377086c7b9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '511c682375de4d5294723b0d656190d2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 1660.084608] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e221c164cd5c42c88d3a37ea5bfb74df [ 1660.085889] env[62740]: DEBUG nova.compute.manager [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Start spawning the instance on the hypervisor. 
{{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1660.113324] env[62740]: DEBUG nova.virt.hardware [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1660.113581] env[62740]: DEBUG nova.virt.hardware [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1660.113812] env[62740]: DEBUG nova.virt.hardware [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1660.113925] env[62740]: DEBUG nova.virt.hardware [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1660.114089] env[62740]: DEBUG nova.virt.hardware [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1660.114243] env[62740]: DEBUG nova.virt.hardware [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1660.114450] env[62740]: DEBUG nova.virt.hardware [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1660.114610] env[62740]: DEBUG nova.virt.hardware [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1660.114793] 
env[62740]: DEBUG nova.virt.hardware [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1660.114993] env[62740]: DEBUG nova.virt.hardware [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1660.115189] env[62740]: DEBUG nova.virt.hardware [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1660.116094] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0159409-ce91-4300-bc58-c989eb3d1e41 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.124886] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53c309a3-0685-44ff-b70e-4268252a1ffc {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.494510] env[62740]: DEBUG nova.network.neutron [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Successfully created port: f1b07082-3d48-4c34-b5fc-b237e74b7697 {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1661.239210] env[62740]: DEBUG nova.network.neutron [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Successfully updated port: f1b07082-3d48-4c34-b5fc-b237e74b7697 {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1661.239759] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 87070dbc9ca74af68710ba09a73067af in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1661.252654] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 87070dbc9ca74af68710ba09a73067af [ 1661.253454] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Acquiring lock "refresh_cache-c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1661.253597] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Acquired lock "refresh_cache-c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
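The nova.virt.hardware lines above ("Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies", "Possible topologies [...]") enumerate every factorization of the vCPU count into sockets * cores * threads that fits the flavor/image limits, then sort by preference. A simplified sketch of that enumeration, not Nova's exact code:

    import collections

    VirtCPUTopology = collections.namedtuple('VirtCPUTopology',
                                             'sockets cores threads')

    def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
        # Enumerate sockets * cores * threads == vcpus within the limits.
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            rest = vcpus // sockets
            for cores in range(1, min(rest, max_cores) + 1):
                if rest % cores:
                    continue
                threads = rest // cores
                if threads <= max_threads:
                    yield VirtCPUTopology(sockets, cores, threads)

    # With the 65536 limits logged above, one vCPU yields exactly one topology:
    print(list(possible_topologies(1, 65536, 65536, 65536)))
    # [VirtCPUTopology(sockets=1, cores=1, threads=1)]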
[ 1661.253751] env[62740]: DEBUG nova.network.neutron [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1661.254152] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 7d8c5951ebe548e68aac0b5136066264 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1661.261071] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7d8c5951ebe548e68aac0b5136066264 [ 1661.293970] env[62740]: DEBUG nova.network.neutron [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1661.516976] env[62740]: DEBUG nova.compute.manager [req-e4267b28-0877-44b0-bbe6-307d4829dfba req-5c072f9b-dd56-47ee-83bb-083252a7f024 service nova] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Received event network-vif-plugged-f1b07082-3d48-4c34-b5fc-b237e74b7697 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1661.517212] env[62740]: DEBUG oslo_concurrency.lockutils [req-e4267b28-0877-44b0-bbe6-307d4829dfba req-5c072f9b-dd56-47ee-83bb-083252a7f024 service nova] Acquiring lock "c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1661.517343] env[62740]: DEBUG oslo_concurrency.lockutils [req-e4267b28-0877-44b0-bbe6-307d4829dfba req-5c072f9b-dd56-47ee-83bb-083252a7f024 service nova] Lock "c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1661.517517] env[62740]: DEBUG oslo_concurrency.lockutils [req-e4267b28-0877-44b0-bbe6-307d4829dfba req-5c072f9b-dd56-47ee-83bb-083252a7f024 service nova] Lock "c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1661.517691] env[62740]: DEBUG nova.compute.manager [req-e4267b28-0877-44b0-bbe6-307d4829dfba req-5c072f9b-dd56-47ee-83bb-083252a7f024 service nova] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] No waiting events found dispatching network-vif-plugged-f1b07082-3d48-4c34-b5fc-b237e74b7697 {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1661.518074] env[62740]: WARNING nova.compute.manager [req-e4267b28-0877-44b0-bbe6-307d4829dfba req-5c072f9b-dd56-47ee-83bb-083252a7f024 service nova] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Received unexpected event network-vif-plugged-f1b07082-3d48-4c34-b5fc-b237e74b7697 for instance with 
vm_state building and task_state spawning. [ 1661.518281] env[62740]: DEBUG nova.compute.manager [req-e4267b28-0877-44b0-bbe6-307d4829dfba req-5c072f9b-dd56-47ee-83bb-083252a7f024 service nova] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Received event network-changed-f1b07082-3d48-4c34-b5fc-b237e74b7697 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1661.518451] env[62740]: DEBUG nova.compute.manager [req-e4267b28-0877-44b0-bbe6-307d4829dfba req-5c072f9b-dd56-47ee-83bb-083252a7f024 service nova] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Refreshing instance network info cache due to event network-changed-f1b07082-3d48-4c34-b5fc-b237e74b7697. {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1661.518717] env[62740]: DEBUG oslo_concurrency.lockutils [req-e4267b28-0877-44b0-bbe6-307d4829dfba req-5c072f9b-dd56-47ee-83bb-083252a7f024 service nova] Acquiring lock "refresh_cache-c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1661.523304] env[62740]: DEBUG nova.network.neutron [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Updating instance_info_cache with network_info: [{"id": "f1b07082-3d48-4c34-b5fc-b237e74b7697", "address": "fa:16:3e:4b:1b:d4", "network": {"id": "70fb46bd-81f8-49d4-bb8f-cb2fb8b6944e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2105504489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "511c682375de4d5294723b0d656190d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77ccbd87-ecfd-4b2d-a1ea-29774addcef6", "external-id": "nsx-vlan-transportzone-385", "segmentation_id": 385, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1b07082-3d", "ovs_interfaceid": "f1b07082-3d48-4c34-b5fc-b237e74b7697", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1661.523864] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg e9d990c333b4448eb284d66d097bcc39 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1661.533571] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e9d990c333b4448eb284d66d097bcc39 [ 1661.534238] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Releasing lock "refresh_cache-c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1661.534521] 
env[62740]: DEBUG nova.compute.manager [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Instance network_info: |[{"id": "f1b07082-3d48-4c34-b5fc-b237e74b7697", "address": "fa:16:3e:4b:1b:d4", "network": {"id": "70fb46bd-81f8-49d4-bb8f-cb2fb8b6944e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2105504489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "511c682375de4d5294723b0d656190d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77ccbd87-ecfd-4b2d-a1ea-29774addcef6", "external-id": "nsx-vlan-transportzone-385", "segmentation_id": 385, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1b07082-3d", "ovs_interfaceid": "f1b07082-3d48-4c34-b5fc-b237e74b7697", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1661.534797] env[62740]: DEBUG oslo_concurrency.lockutils [req-e4267b28-0877-44b0-bbe6-307d4829dfba req-5c072f9b-dd56-47ee-83bb-083252a7f024 service nova] Acquired lock "refresh_cache-c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1661.534972] env[62740]: DEBUG nova.network.neutron [req-e4267b28-0877-44b0-bbe6-307d4829dfba req-5c072f9b-dd56-47ee-83bb-083252a7f024 service nova] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Refreshing network info cache for port f1b07082-3d48-4c34-b5fc-b237e74b7697 {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1661.535551] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-e4267b28-0877-44b0-bbe6-307d4829dfba req-5c072f9b-dd56-47ee-83bb-083252a7f024 service nova] Expecting reply to msg 38546b4ba8074c88921b42cfb605b021 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1661.536386] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4b:1b:d4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '77ccbd87-ecfd-4b2d-a1ea-29774addcef6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f1b07082-3d48-4c34-b5fc-b237e74b7697', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1661.545670] env[62740]: DEBUG oslo.service.loopingcall [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
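Structurally, the network_info payloads above are a list of VIF dicts (Nova wraps them in nova.network.model objects before use). A trimmed-down walk over the same data, keeping just a few of the fields shown in the cache entry:

    # Fields copied from the instance_info_cache entry above; the full blob
    # carries more keys (routes, details, devname, ...).
    network_info = [{
        'id': 'f1b07082-3d48-4c34-b5fc-b237e74b7697',
        'address': 'fa:16:3e:4b:1b:d4',
        'network': {'subnets': [{'cidr': '192.168.128.0/28',
                                 'ips': [{'address': '192.168.128.7',
                                          'type': 'fixed'}]}]},
    }]

    for vif in network_info:
        for subnet in vif['network']['subnets']:
            for ip in subnet['ips']:
                print(vif['id'], vif['address'], ip['address'])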
[ 1661.546343] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 38546b4ba8074c88921b42cfb605b021 [ 1661.546706] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1661.549233] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e390e5e6-1099-4789-bb91-981bb5c5d9c3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.570030] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1661.570030] env[62740]: value = "task-640310" [ 1661.570030] env[62740]: _type = "Task" [ 1661.570030] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1661.580602] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640310, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.846895] env[62740]: DEBUG nova.network.neutron [req-e4267b28-0877-44b0-bbe6-307d4829dfba req-5c072f9b-dd56-47ee-83bb-083252a7f024 service nova] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Updated VIF entry in instance network info cache for port f1b07082-3d48-4c34-b5fc-b237e74b7697. {{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1661.847270] env[62740]: DEBUG nova.network.neutron [req-e4267b28-0877-44b0-bbe6-307d4829dfba req-5c072f9b-dd56-47ee-83bb-083252a7f024 service nova] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Updating instance_info_cache with network_info: [{"id": "f1b07082-3d48-4c34-b5fc-b237e74b7697", "address": "fa:16:3e:4b:1b:d4", "network": {"id": "70fb46bd-81f8-49d4-bb8f-cb2fb8b6944e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2105504489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "511c682375de4d5294723b0d656190d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77ccbd87-ecfd-4b2d-a1ea-29774addcef6", "external-id": "nsx-vlan-transportzone-385", "segmentation_id": 385, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1b07082-3d", "ovs_interfaceid": "f1b07082-3d48-4c34-b5fc-b237e74b7697", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1661.847881] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-e4267b28-0877-44b0-bbe6-307d4829dfba req-5c072f9b-dd56-47ee-83bb-083252a7f024 service nova] Expecting reply to msg 3962d158c777451b8eba419b52d7d263 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1661.857064] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
3962d158c777451b8eba419b52d7d263 [ 1661.857713] env[62740]: DEBUG oslo_concurrency.lockutils [req-e4267b28-0877-44b0-bbe6-307d4829dfba req-5c072f9b-dd56-47ee-83bb-083252a7f024 service nova] Releasing lock "refresh_cache-c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1662.079649] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640310, 'name': CreateVM_Task, 'duration_secs': 0.314729} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1662.079820] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1662.080500] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1662.080673] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1662.081046] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1662.081302] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd1e1bb7-3804-4968-b1b6-5aca6f9a0e31 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.085559] env[62740]: DEBUG oslo_vmware.api [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Waiting for the task: (returnval){ [ 1662.085559] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52b3ecfb-c6a2-0d4b-8065-042886d5ce21" [ 1662.085559] env[62740]: _type = "Task" [ 1662.085559] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1662.093066] env[62740]: DEBUG oslo_vmware.api [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52b3ecfb-c6a2-0d4b-8065-042886d5ce21, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
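The Task entries above ("Waiting for the task: ... task-640310 ... progress is 0%. ... completed successfully.") come from oslo.vmware's invoke/wait pattern. A hedged sketch of that pattern; the host, credentials and managed-object ID are placeholders, and constructing the session really logs in to vCenter:

    from oslo_vmware import api, vim_util

    # Placeholders: a reachable vCenter and valid credentials are required.
    session = api.VMwareAPISession('vc.example.org', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')  # placeholder moref
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    # wait_for_task() polls like the "_poll_task ... progress is 0%." entries
    # above and raises a VimException subclass if task_info.error is set.
    session.wait_for_task(task)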
[ 1662.595810] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1662.596209] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1662.596302] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1668.874344] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Acquiring lock "19f27c16-45b3-47d8-acf0-18255844431f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1668.874657] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Lock "19f27c16-45b3-47d8-acf0-18255844431f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1676.892528] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1676.892528] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1677.891408] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1677.891584] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Starting heal instance info cache {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 1677.891706] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Rebuilding the list of 
instances to heal {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 1677.892316] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 47abe8648efc4439a9a9567ae9f5668e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1677.911734] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 47abe8648efc4439a9a9567ae9f5668e [ 1677.913917] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1677.914085] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1677.914220] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1677.914351] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1677.914477] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1677.914602] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1677.914726] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1677.914846] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1677.914964] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1677.915093] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Skipping network cache update for instance because it is Building. 
{{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1677.915220] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Didn't find any instances for network info cache update. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 1678.890631] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1678.890861] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1678.891019] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62740) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 1679.890593] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1680.891073] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager.update_available_resource {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1680.891450] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 4252e030fac3460d8c66ae5e7e94cdce in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1680.903322] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4252e030fac3460d8c66ae5e7e94cdce [ 1680.904331] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1680.904544] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1680.904714] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1680.904924] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62740) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1680.905984] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7675d85d-b3a0-4b42-8b4d-66525ba34f05 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
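The "Running periodic task ComputeManager._*" entries above are driven by oslo.service's periodic task machinery. A minimal self-contained sketch of how such tasks are declared and dispatched; the class name, task body and spacing are illustrative:

    from oslo_config import cfg
    from oslo_service import periodic_task

    class Manager(periodic_task.PeriodicTasks):

        @periodic_task.periodic_task(spacing=10)
        def _heal_instance_info_cache(self, context):
            # One instance per pass gets its network info cache refreshed.
            print('healing info cache')

    mgr = Manager(cfg.CONF)
    # A service normally calls this on a timer; each call runs whatever
    # tasks are due according to their spacing.
    mgr.run_periodic_tasks(context=None)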
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7675d85d-b3a0-4b42-8b4d-66525ba34f05 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.915181] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-396c899d-e420-4e67-8acf-7408d58d0eed {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.928778] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca26a57d-a537-417f-ae8a-c9cef34f180e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.934961] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46937f6c-fff4-4999-9beb-7df523f95ebe {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.963945] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181639MB free_disk=90GB free_vcpus=48 pci_devices=None {{(pid=62740) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1680.964100] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1680.964291] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1680.965609] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg cdf5a962201b4899ac4d7c2e236d86db in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1681.002023] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cdf5a962201b4899ac4d7c2e236d86db [ 1681.005789] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 22c18b955ba14a1e8385b6d9838dd3d0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1681.015176] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 22c18b955ba14a1e8385b6d9838dd3d0 [ 1681.037384] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 3f36f081-2851-4339-860d-0a302ef4ee2c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1681.037543] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 913ddb91-9d46-459e-8775-c9f380ed3cc4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1681.037816] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 07efd13e-40d0-4158-b17c-6f5c75474ce3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1681.037816] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 388d71f2-b229-4666-a53d-d5b07e498eed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1681.037929] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance c0daf074-eecb-4899-938f-477031efc6d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1681.038065] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 61fea037-aac3-47ef-aa6a-5dfa657d840d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1681.038196] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 86c57375-8328-4344-b228-2f1ce6efc71e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1681.038313] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance a41506d2-33b2-40b8-badb-41312c7abbd2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1681.038429] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 3aa2858e-d422-408a-a83a-98382f971add actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1681.038545] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1681.039140] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 3234ff35232a4b0abe958277019f5536 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1681.049372] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3234ff35232a4b0abe958277019f5536 [ 1681.050118] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1681.050732] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 31f3429ffeae488b85f25a0847b9e665 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1681.060210] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31f3429ffeae488b85f25a0847b9e665 [ 1681.060874] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 9a595940-16ba-401a-922f-331cf87093c9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1681.061357] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 1322b8c3539c4be3a3356d04c013827c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1681.069965] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1322b8c3539c4be3a3356d04c013827c [ 1681.070618] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance d2fb90b7-1618-4f07-8854-81566887a7cd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1681.071082] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg d0dc219f9ac34052b4a184956a0e98ed in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1681.079861] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d0dc219f9ac34052b4a184956a0e98ed [ 1681.080428] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 08197ee6-55de-40f8-8704-641c0614cad6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1681.080858] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg e6d41227ef3149d0931f2530cf235857 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1681.089976] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e6d41227ef3149d0931f2530cf235857 [ 1681.090162] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 19f27c16-45b3-47d8-acf0-18255844431f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1681.090387] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1681.090535] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1681.260406] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8131b3a5-48bf-4ea4-9355-5bbd1de223e8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.268084] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d628a00-d7cd-43b6-9cd1-23b8d37d1899 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.299424] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98be3145-e3d2-448e-b47b-60beebd2c255 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.306347] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8ec7f90-3452-4569-904e-1c4ef8835ecb {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.318858] env[62740]: DEBUG nova.compute.provider_tree [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1681.319315] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 118542910496424d8e9d786a9e878f44 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1681.326519] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 118542910496424d8e9d786a9e878f44 [ 1681.327356] env[62740]: DEBUG nova.scheduler.client.report [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed for provider 
d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1681.329605] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 75889627467045d9a981b87f63307c3e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1681.343744] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 75889627467045d9a981b87f63307c3e [ 1681.344398] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62740) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1681.344564] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.380s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1684.344982] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1684.345379] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1703.996765] env[62740]: WARNING oslo_vmware.rw_handles [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1703.996765] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1703.996765] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1703.996765] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1703.996765] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1703.996765] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 1703.996765] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1703.996765] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1703.996765] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1703.996765] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1703.996765] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end 
closed connection without response [ 1703.996765] env[62740]: ERROR oslo_vmware.rw_handles [ 1703.997407] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/7bff0742-b76b-426d-acbe-695819bafbc4/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1703.999305] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1703.999566] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Copying Virtual Disk [datastore2] vmware_temp/7bff0742-b76b-426d-acbe-695819bafbc4/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore2] vmware_temp/7bff0742-b76b-426d-acbe-695819bafbc4/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1703.999865] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0d4df1cb-061d-41fd-bdcf-8b01757c094a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.009350] env[62740]: DEBUG oslo_vmware.api [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Waiting for the task: (returnval){ [ 1704.009350] env[62740]: value = "task-640311" [ 1704.009350] env[62740]: _type = "Task" [ 1704.009350] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.016939] env[62740]: DEBUG oslo_vmware.api [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Task: {'id': task-640311, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.520165] env[62740]: DEBUG oslo_vmware.exceptions [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Fault InvalidArgument not matched. 
{{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1704.520471] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1704.521115] env[62740]: ERROR nova.compute.manager [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1704.521115] env[62740]: Faults: ['InvalidArgument'] [ 1704.521115] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Traceback (most recent call last): [ 1704.521115] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1704.521115] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] yield resources [ 1704.521115] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1704.521115] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] self.driver.spawn(context, instance, image_meta, [ 1704.521115] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1704.521115] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1704.521115] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1704.521115] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] self._fetch_image_if_missing(context, vi) [ 1704.521115] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1704.521115] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] image_cache(vi, tmp_image_ds_loc) [ 1704.521115] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1704.521115] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] vm_util.copy_virtual_disk( [ 1704.521115] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1704.521115] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] session._wait_for_task(vmdk_copy_task) [ 1704.521115] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1704.521115] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] return self.wait_for_task(task_ref) [ 1704.521115] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1704.521115] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] return evt.wait() [ 1704.521115] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1704.521115] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] result = hub.switch() [ 1704.521115] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1704.521115] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] return self.greenlet.switch() [ 1704.521115] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1704.521115] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] self.f(*self.args, **self.kw) [ 1704.521115] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1704.521115] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] raise exceptions.translate_fault(task_info.error) [ 1704.521115] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1704.521115] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Faults: ['InvalidArgument'] [ 1704.521115] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] [ 1704.522319] env[62740]: INFO nova.compute.manager [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Terminating instance [ 1704.522953] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1704.523181] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1704.523422] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-e96fbd95-b5fd-424a-8e6a-0337701375c0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.525650] env[62740]: DEBUG nova.compute.manager [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1704.525901] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1704.526643] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccb2b549-b15a-4266-8dce-50976f53698b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.533655] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1704.533841] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0e955d9e-7ea7-415b-8526-13418f3c6714 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.536158] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1704.536422] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1704.538037] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9bb6bff-96cc-46a4-bee9-47434f9ba7ec {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.543688] env[62740]: DEBUG oslo_vmware.api [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Waiting for the task: (returnval){ [ 1704.543688] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52fcac2f-9b9e-35d3-8a5b-d1dcf75f9db9" [ 1704.543688] env[62740]: _type = "Task" [ 1704.543688] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.551072] env[62740]: DEBUG oslo_vmware.api [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52fcac2f-9b9e-35d3-8a5b-d1dcf75f9db9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.613623] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1704.613867] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1704.614089] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Deleting the datastore file [datastore2] 3f36f081-2851-4339-860d-0a302ef4ee2c {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1704.614366] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dc6c5779-7bbc-4bbe-8047-f940738bbdc4 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.620541] env[62740]: DEBUG oslo_vmware.api [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Waiting for the task: (returnval){ [ 1704.620541] env[62740]: value = "task-640313" [ 1704.620541] env[62740]: _type = "Task" [ 1704.620541] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.628258] env[62740]: DEBUG oslo_vmware.api [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Task: {'id': task-640313, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.053824] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1705.054164] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Creating directory with path [datastore2] vmware_temp/da42e156-3019-4bf0-a646-c4c1bdf3b20c/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1705.054301] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f6fb9a24-d22b-48b9-af8c-85a4743ec5c9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.065242] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Created directory with path [datastore2] vmware_temp/da42e156-3019-4bf0-a646-c4c1bdf3b20c/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1705.065424] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Fetch image to [datastore2] vmware_temp/da42e156-3019-4bf0-a646-c4c1bdf3b20c/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1705.065621] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/da42e156-3019-4bf0-a646-c4c1bdf3b20c/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1705.066329] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21912cfc-a861-4086-95e6-1e221d64daeb {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.072669] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28696c86-6d2b-4da2-8221-37309027433e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.081529] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c228023-8c64-46bf-bf50-79e5556c7384 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.111666] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-924bfebc-358f-4323-af2f-7c4df403d05f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.116840] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9298e9e9-4b04-465f-8637-77fb8b6323f1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.128504] env[62740]: DEBUG oslo_vmware.api [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Task: {'id': task-640313, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.082115} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1705.128756] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1705.128936] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1705.129123] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1705.129299] env[62740]: INFO nova.compute.manager [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Took 0.60 seconds to destroy the instance on the hypervisor. 
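
The records above follow the standard oslo.vmware task-polling pattern: invoke a *_Task method (CopyVirtualDisk_Task, DeleteDatastoreFile_Task), log "Waiting for the task ... to complete", poll progress in _poll_task, and translate any fault into an exception. A minimal sketch of that pattern, assuming placeholder vCenter credentials and a PowerOffVM_Task standing in for the disk operations shown here:

# Sketch of the oslo.vmware task-polling pattern seen above: invoke a
# *_Task method, then block in wait_for_task(), which polls the task
# (the "progress is 0%" lines) and raises VimFaultException on faults
# such as InvalidArgument. Host, credentials and the moref value are
# placeholders, not taken from this log.
from oslo_vmware import api, exceptions, vim_util

session = api.VMwareAPISession(
    'vcenter.example.org', 'user', 'secret',
    10,    # api_retry_count
    0.5)   # task_poll_interval: seconds between _poll_task iterations

try:
    vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)          # returns task info on success
except exceptions.VimFaultException as e:
    print('task failed:', e.fault_list)  # e.g. ['InvalidArgument']
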
[ 1705.131445] env[62740]: DEBUG nova.compute.claims [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1705.131617] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1705.131844] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1705.133719] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg cf8a7551a2854835ae523dc6652a914c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1705.136194] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1705.177885] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cf8a7551a2854835ae523dc6652a914c [ 1705.189720] env[62740]: DEBUG oslo_vmware.rw_handles [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/da42e156-3019-4bf0-a646-c4c1bdf3b20c/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1705.250325] env[62740]: DEBUG oslo_vmware.rw_handles [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Completed reading data from the image iterator. {{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1705.250515] env[62740]: DEBUG oslo_vmware.rw_handles [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/da42e156-3019-4bf0-a646-c4c1bdf3b20c/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1705.398085] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef73a193-3907-407c-9b8c-382610e448fb {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.405451] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16b40fa5-bf72-4a7b-b506-95d329ea0ab5 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.434138] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b713f5a-7ddb-4f2f-a36b-c61e29e1cd43 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.440596] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40e18625-f095-462c-adf0-be729a065d58 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.454386] env[62740]: DEBUG nova.compute.provider_tree [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1705.454863] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg 9e7b81bc8c2647849cf8b967706e8487 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1705.462493] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9e7b81bc8c2647849cf8b967706e8487 [ 1705.463377] env[62740]: DEBUG nova.scheduler.client.report [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1705.465586] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg eb49dd75fa974439a9a609292b15e2a8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1705.476873] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eb49dd75fa974439a9a609292b15e2a8 [ 1705.477617] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.346s 
{{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1705.478160] env[62740]: ERROR nova.compute.manager [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1705.478160] env[62740]: Faults: ['InvalidArgument'] [ 1705.478160] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Traceback (most recent call last): [ 1705.478160] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1705.478160] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] self.driver.spawn(context, instance, image_meta, [ 1705.478160] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1705.478160] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1705.478160] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1705.478160] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] self._fetch_image_if_missing(context, vi) [ 1705.478160] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1705.478160] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] image_cache(vi, tmp_image_ds_loc) [ 1705.478160] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1705.478160] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] vm_util.copy_virtual_disk( [ 1705.478160] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1705.478160] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] session._wait_for_task(vmdk_copy_task) [ 1705.478160] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1705.478160] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] return self.wait_for_task(task_ref) [ 1705.478160] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1705.478160] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] return evt.wait() [ 1705.478160] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1705.478160] env[62740]: ERROR 
nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] result = hub.switch() [ 1705.478160] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1705.478160] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] return self.greenlet.switch() [ 1705.478160] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1705.478160] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] self.f(*self.args, **self.kw) [ 1705.478160] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1705.478160] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] raise exceptions.translate_fault(task_info.error) [ 1705.478160] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1705.478160] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Faults: ['InvalidArgument'] [ 1705.478160] env[62740]: ERROR nova.compute.manager [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] [ 1705.479097] env[62740]: DEBUG nova.compute.utils [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1705.480271] env[62740]: DEBUG nova.compute.manager [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Build of instance 3f36f081-2851-4339-860d-0a302ef4ee2c was re-scheduled: A specified parameter was not correct: fileType [ 1705.480271] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1705.480728] env[62740]: DEBUG nova.compute.manager [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1705.480903] env[62740]: DEBUG nova.compute.manager [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1705.481086] env[62740]: DEBUG nova.compute.manager [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1705.481254] env[62740]: DEBUG nova.network.neutron [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1705.794577] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg a82f91a9e3304022b0a095882d2806af in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1705.807525] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a82f91a9e3304022b0a095882d2806af [ 1705.807525] env[62740]: DEBUG nova.network.neutron [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1705.807774] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg 16548934582b4609ba280fded8f40962 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1705.821795] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 16548934582b4609ba280fded8f40962 [ 1705.822448] env[62740]: INFO nova.compute.manager [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Took 0.34 seconds to deallocate network for instance. 
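
From here on, several request contexts interleave: the rescheduled build under req-5e278da3-..., the admin delete under req-7ae09f43-..., and the periodic tasks under req-00bab09f-.... Transcripts in this format are easier to follow when records are grouped by request id. A small, hypothetical parsing helper (the regexes are assumptions about this layout, not an official parser):

# Hypothetical helper for transcripts like this one: split the stream on
# "[ <seconds>] env[pid]:" record markers and group records by request id
# so a single build/teardown can be followed end to end.
import re
from collections import defaultdict

RECORD = re.compile(
    r'\[ *(\d+\.\d+)\] env\[\d+\]: (\w+) (\S+) (.*?)'
    r'(?=\[ *\d+\.\d+\] env\[|\Z)', re.S)
REQ_ID = re.compile(r'req-[0-9a-f-]{36}')

def by_request(text):
    groups = defaultdict(list)
    for ts, level, logger, body in RECORD.findall(text):
        m = REQ_ID.search(body)
        groups[m.group(0) if m else '-'].append(
            (float(ts), level, logger, body.strip()))
    return groups

# Usage sketch:
#   records = by_request(open('n-cpu.log').read())
#   for ts, level, logger, body in records['req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b']:
#       print(ts, level, body[:120])
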
[ 1705.824102] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg f757e6f2bc16400ba786a0ae52bda627 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1705.854870] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f757e6f2bc16400ba786a0ae52bda627 [ 1705.857657] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg 727657f417294476b32be6c0cb3a12d6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1705.887688] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 727657f417294476b32be6c0cb3a12d6 [ 1705.912238] env[62740]: INFO nova.scheduler.client.report [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Deleted allocations for instance 3f36f081-2851-4339-860d-0a302ef4ee2c [ 1705.919672] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg d1a58351f2e4477b9316dfc8ccd8bc76 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1705.934094] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d1a58351f2e4477b9316dfc8ccd8bc76 [ 1705.934654] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5e278da3-23f2-45b1-9088-5dbe1e34ab4b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Lock "3f36f081-2851-4339-860d-0a302ef4ee2c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 513.345s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1705.935218] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg 2bff16353c5843ffad5abdc378d10889 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1705.935985] env[62740]: DEBUG oslo_concurrency.lockutils [None req-539ed5f6-ec8c-42c4-82a0-f435b92bfbf4 tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Lock "3f36f081-2851-4339-860d-0a302ef4ee2c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 317.291s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1705.936226] env[62740]: DEBUG oslo_concurrency.lockutils [None req-539ed5f6-ec8c-42c4-82a0-f435b92bfbf4 tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Acquiring lock "3f36f081-2851-4339-860d-0a302ef4ee2c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1705.936434] env[62740]: DEBUG oslo_concurrency.lockutils [None req-539ed5f6-ec8c-42c4-82a0-f435b92bfbf4 tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Lock 
"3f36f081-2851-4339-860d-0a302ef4ee2c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1705.936830] env[62740]: DEBUG oslo_concurrency.lockutils [None req-539ed5f6-ec8c-42c4-82a0-f435b92bfbf4 tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Lock "3f36f081-2851-4339-860d-0a302ef4ee2c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1705.938978] env[62740]: INFO nova.compute.manager [None req-539ed5f6-ec8c-42c4-82a0-f435b92bfbf4 tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Terminating instance [ 1705.941125] env[62740]: DEBUG nova.compute.manager [None req-539ed5f6-ec8c-42c4-82a0-f435b92bfbf4 tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1705.941368] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-539ed5f6-ec8c-42c4-82a0-f435b92bfbf4 tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1705.941932] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7d7412b0-613c-41a2-b1f6-8cc1ced4071c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.945626] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2bff16353c5843ffad5abdc378d10889 [ 1705.946103] env[62740]: DEBUG nova.compute.manager [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Starting instance... 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1705.947714] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg 6cf014a088854829ad5547878c7eb38a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1705.953246] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0f67ce0-b7af-4049-a0ed-ce053e88a67a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.982336] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6cf014a088854829ad5547878c7eb38a [ 1705.983045] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-539ed5f6-ec8c-42c4-82a0-f435b92bfbf4 tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3f36f081-2851-4339-860d-0a302ef4ee2c could not be found. [ 1705.983175] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-539ed5f6-ec8c-42c4-82a0-f435b92bfbf4 tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1705.983336] env[62740]: INFO nova.compute.manager [None req-539ed5f6-ec8c-42c4-82a0-f435b92bfbf4 tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1705.983623] env[62740]: DEBUG oslo.service.loopingcall [None req-539ed5f6-ec8c-42c4-82a0-f435b92bfbf4 tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1705.988404] env[62740]: DEBUG nova.compute.manager [-] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1705.988505] env[62740]: DEBUG nova.network.neutron [-] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1706.001319] env[62740]: DEBUG oslo_concurrency.lockutils [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1706.001560] env[62740]: DEBUG oslo_concurrency.lockutils [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1706.002981] env[62740]: INFO nova.compute.claims [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1706.004512] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg fbd1f76917d246af9c26e2b80fb6bf91 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1706.006911] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 35ddeb9608b84ca9bbdc679b56516b3f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1706.012766] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 35ddeb9608b84ca9bbdc679b56516b3f [ 1706.013115] env[62740]: DEBUG nova.network.neutron [-] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1706.013451] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 4f7a9fd70aac42e9a5c05ee17093caae in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1706.023821] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4f7a9fd70aac42e9a5c05ee17093caae [ 1706.024595] env[62740]: INFO nova.compute.manager [-] [instance: 3f36f081-2851-4339-860d-0a302ef4ee2c] Took 0.04 seconds to deallocate network for instance. 
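A note on the terminate path just above: the vmwareapi driver treats InstanceNotFound from the backend as a successful destroy (the WARNING is immediately followed by "Instance destroyed"), and network deallocation is then driven through oslo.service's looping-call machinery, which is what emits the "Waiting for function ... _deallocate_network_with_retries to return" line. Below is a minimal sketch of that generic pattern, assuming illustrative names (deallocate_with_retries, _try_once, the retry budget) rather than Nova's actual implementation:

    from oslo_service import loopingcall

    def deallocate_with_retries(max_attempts=3):
        state = {'attempts': 0}

        def _try_once():
            pass  # stand-in for the real Neutron deallocation call

        def _poll():
            state['attempts'] += 1
            try:
                _try_once()
            except Exception:
                if state['attempts'] >= max_attempts:
                    raise      # retry budget exhausted; surface the error
                return         # swallow and retry on the next interval
            # Raising LoopingCallDone is how the polled function signals
            # successful completion back to the .wait() below.
            raise loopingcall.LoopingCallDone(retvalue=True)

        timer = loopingcall.FixedIntervalLoopingCall(_poll)
        return timer.start(interval=1.0).wait()

The same wrapper shows up again later in this trace for nova.virt.vmwareapi.vm_util.create_vm, so any function the compute manager wants polled to completion goes through the same loopingcall plumbing.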
[ 1706.027738] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-539ed5f6-ec8c-42c4-82a0-f435b92bfbf4 tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg f839d5c5cd4e44628a1209e79ab4ec34 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1706.044468] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fbd1f76917d246af9c26e2b80fb6bf91 [ 1706.046126] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg f2bbb8ed2da34246beb3d04b2b3100c8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1706.055734] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f839d5c5cd4e44628a1209e79ab4ec34 [ 1706.056402] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f2bbb8ed2da34246beb3d04b2b3100c8 [ 1706.074060] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-539ed5f6-ec8c-42c4-82a0-f435b92bfbf4 tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg bde2f348354e4e14976a05c79716268d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1706.113588] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bde2f348354e4e14976a05c79716268d [ 1706.116884] env[62740]: DEBUG oslo_concurrency.lockutils [None req-539ed5f6-ec8c-42c4-82a0-f435b92bfbf4 tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Lock "3f36f081-2851-4339-860d-0a302ef4ee2c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.181s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1706.117296] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-539ed5f6-ec8c-42c4-82a0-f435b92bfbf4 tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg eb71d8e4a093491b9d8c58091fe7c388 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1706.126969] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eb71d8e4a093491b9d8c58091fe7c388 [ 1706.216264] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43265961-526d-4fa4-89f5-cc1cbffbca92 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.223829] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7754b453-020f-4755-9bea-01e5ce22e61a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.252545] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f49f83ab-cb9a-4ecc-9e18-4d7698c59c59 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.259304] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd42a59d-c367-4161-ae51-dabda405f6f6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.272165] env[62740]: DEBUG nova.compute.provider_tree 
[None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1706.272697] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg be29661bd15b4d6097e77c049fe9c000 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1706.280322] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be29661bd15b4d6097e77c049fe9c000 [ 1706.281332] env[62740]: DEBUG nova.scheduler.client.report [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1706.283628] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg 89ebbca26f804d7cb9c6087fae349761 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1706.296868] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 89ebbca26f804d7cb9c6087fae349761 [ 1706.297613] env[62740]: DEBUG oslo_concurrency.lockutils [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.296s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1706.298104] env[62740]: DEBUG nova.compute.manager [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Start building networks asynchronously for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1706.299781] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg 57fbfb8d32ee4caaaafa10dd54afa783 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1706.328720] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 57fbfb8d32ee4caaaafa10dd54afa783 [ 1706.329977] env[62740]: DEBUG nova.compute.utils [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1706.330571] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg 1c65fbebf2da404da82f0c4c641c1a4a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1706.331776] env[62740]: DEBUG nova.compute.manager [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1706.331948] env[62740]: DEBUG nova.network.neutron [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1706.340416] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1c65fbebf2da404da82f0c4c641c1a4a [ 1706.340927] env[62740]: DEBUG nova.compute.manager [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1706.342502] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg 9dfa8d9103464022b407974fa8bdc16f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1706.372015] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9dfa8d9103464022b407974fa8bdc16f [ 1706.374824] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg 6e67de8954f24995b1992943d1aa7579 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1706.395397] env[62740]: DEBUG nova.policy [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e3061d1e1e6544388537275e2933b02f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '960954b067f841cf9dff2016571551bf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 1706.403692] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6e67de8954f24995b1992943d1aa7579 [ 1706.404765] env[62740]: DEBUG nova.compute.manager [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Start spawning the instance on the hypervisor. 
{{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1706.431484] env[62740]: DEBUG nova.virt.hardware [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1706.431715] env[62740]: DEBUG nova.virt.hardware [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1706.431875] env[62740]: DEBUG nova.virt.hardware [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1706.432072] env[62740]: DEBUG nova.virt.hardware [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1706.432227] env[62740]: DEBUG nova.virt.hardware [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1706.432377] env[62740]: DEBUG nova.virt.hardware [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1706.432584] env[62740]: DEBUG nova.virt.hardware [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1706.432745] env[62740]: DEBUG nova.virt.hardware [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1706.432917] env[62740]: DEBUG nova.virt.hardware [None 
req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1706.433094] env[62740]: DEBUG nova.virt.hardware [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1706.433271] env[62740]: DEBUG nova.virt.hardware [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1706.434128] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2b633de-5e22-41eb-9258-0791260d046a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.442631] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5cc65ac-b03c-4ab8-92be-aa80c8a9d37f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.994513] env[62740]: DEBUG nova.network.neutron [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Successfully created port: fce6b1f4-0806-4ee4-9cb6-ffeb4921c402 {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1707.637914] env[62740]: DEBUG nova.network.neutron [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Successfully updated port: fce6b1f4-0806-4ee4-9cb6-ffeb4921c402 {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1707.637914] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg bb5698e94cf64588bbc13439f0111d50 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1707.647373] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bb5698e94cf64588bbc13439f0111d50 [ 1707.648065] env[62740]: DEBUG oslo_concurrency.lockutils [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Acquiring lock "refresh_cache-1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1707.648197] env[62740]: DEBUG oslo_concurrency.lockutils [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Acquired lock "refresh_cache-1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1707.648343] env[62740]: DEBUG nova.network.neutron [None 
req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1707.648776] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg f9e121e818e24b5bbf872ae45f1c826a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1707.656247] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f9e121e818e24b5bbf872ae45f1c826a [ 1707.689095] env[62740]: DEBUG nova.network.neutron [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1707.850748] env[62740]: DEBUG nova.compute.manager [req-0ba25624-50f7-4842-848e-07e2f49b708f req-584d2f21-fd31-4a53-af76-af0fb31ce425 service nova] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Received event network-vif-plugged-fce6b1f4-0806-4ee4-9cb6-ffeb4921c402 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1707.850978] env[62740]: DEBUG oslo_concurrency.lockutils [req-0ba25624-50f7-4842-848e-07e2f49b708f req-584d2f21-fd31-4a53-af76-af0fb31ce425 service nova] Acquiring lock "1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1707.851205] env[62740]: DEBUG oslo_concurrency.lockutils [req-0ba25624-50f7-4842-848e-07e2f49b708f req-584d2f21-fd31-4a53-af76-af0fb31ce425 service nova] Lock "1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1707.851376] env[62740]: DEBUG oslo_concurrency.lockutils [req-0ba25624-50f7-4842-848e-07e2f49b708f req-584d2f21-fd31-4a53-af76-af0fb31ce425 service nova] Lock "1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1707.851546] env[62740]: DEBUG nova.compute.manager [req-0ba25624-50f7-4842-848e-07e2f49b708f req-584d2f21-fd31-4a53-af76-af0fb31ce425 service nova] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] No waiting events found dispatching network-vif-plugged-fce6b1f4-0806-4ee4-9cb6-ffeb4921c402 {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1707.851713] env[62740]: WARNING nova.compute.manager [req-0ba25624-50f7-4842-848e-07e2f49b708f req-584d2f21-fd31-4a53-af76-af0fb31ce425 service nova] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Received unexpected event network-vif-plugged-fce6b1f4-0806-4ee4-9cb6-ffeb4921c402 for instance with vm_state building and task_state spawning. 
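Two synchronization idioms are visible in this stretch: per-instance external events (network-vif-plugged-...) are popped under a short-lived "<uuid>-events" lock, and the network info cache rebuild is serialized under "refresh_cache-<uuid>". The WARNING about an unexpected event is benign here: Neutron's vif-plugged notification simply raced ahead while the instance was still in task_state spawning and nothing was waiting on it yet. A minimal sketch of the lock idiom, with an illustrative callback rather than Nova's real signature:

    from oslo_concurrency import lockutils

    def refresh_instance_cache(instance_uuid, rebuild_cache):
        # lockutils.lock() is a context manager; the default process-local
        # semaphore is what produces the Acquiring/Acquired/Releasing lines
        # (lockutils.py:310/313/331) seen throughout this log.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            return rebuild_cache(instance_uuid)

Note the ordering a few entries below: the event handler (req-0ba25624-...) acquires "refresh_cache-1384d6b9-..." only after the build request (req-580f27a0-...) releases it, so the cache refresh triggered by network-changed cannot interleave with the build's own cache population.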
[ 1707.851874] env[62740]: DEBUG nova.compute.manager [req-0ba25624-50f7-4842-848e-07e2f49b708f req-584d2f21-fd31-4a53-af76-af0fb31ce425 service nova] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Received event network-changed-fce6b1f4-0806-4ee4-9cb6-ffeb4921c402 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1707.852375] env[62740]: DEBUG nova.compute.manager [req-0ba25624-50f7-4842-848e-07e2f49b708f req-584d2f21-fd31-4a53-af76-af0fb31ce425 service nova] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Refreshing instance network info cache due to event network-changed-fce6b1f4-0806-4ee4-9cb6-ffeb4921c402. {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1707.852618] env[62740]: DEBUG oslo_concurrency.lockutils [req-0ba25624-50f7-4842-848e-07e2f49b708f req-584d2f21-fd31-4a53-af76-af0fb31ce425 service nova] Acquiring lock "refresh_cache-1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1707.879788] env[62740]: DEBUG nova.network.neutron [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Updating instance_info_cache with network_info: [{"id": "fce6b1f4-0806-4ee4-9cb6-ffeb4921c402", "address": "fa:16:3e:4b:ad:8a", "network": {"id": "911124ba-af71-4345-a712-1d9f1b0ec94d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1164437049-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "960954b067f841cf9dff2016571551bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfce6b1f4-08", "ovs_interfaceid": "fce6b1f4-0806-4ee4-9cb6-ffeb4921c402", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1707.880323] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg 7b6201d1a96f4257a6ffd68be0d5b4ca in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1707.889992] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7b6201d1a96f4257a6ffd68be0d5b4ca [ 1707.890634] env[62740]: DEBUG oslo_concurrency.lockutils [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Releasing lock "refresh_cache-1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1707.891037] env[62740]: DEBUG nova.compute.manager [None 
req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Instance network_info: |[{"id": "fce6b1f4-0806-4ee4-9cb6-ffeb4921c402", "address": "fa:16:3e:4b:ad:8a", "network": {"id": "911124ba-af71-4345-a712-1d9f1b0ec94d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1164437049-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "960954b067f841cf9dff2016571551bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfce6b1f4-08", "ovs_interfaceid": "fce6b1f4-0806-4ee4-9cb6-ffeb4921c402", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1707.891390] env[62740]: DEBUG oslo_concurrency.lockutils [req-0ba25624-50f7-4842-848e-07e2f49b708f req-584d2f21-fd31-4a53-af76-af0fb31ce425 service nova] Acquired lock "refresh_cache-1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1707.891782] env[62740]: DEBUG nova.network.neutron [req-0ba25624-50f7-4842-848e-07e2f49b708f req-584d2f21-fd31-4a53-af76-af0fb31ce425 service nova] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Refreshing network info cache for port fce6b1f4-0806-4ee4-9cb6-ffeb4921c402 {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1707.892167] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-0ba25624-50f7-4842-848e-07e2f49b708f req-584d2f21-fd31-4a53-af76-af0fb31ce425 service nova] Expecting reply to msg a9eb25a2b6274d64a151194086c6b238 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1707.893058] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4b:ad:8a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9c621a9c-66f5-426a-8aab-bd8b2e912106', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fce6b1f4-0806-4ee4-9cb6-ffeb4921c402', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1707.900843] env[62740]: DEBUG oslo.service.loopingcall [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1707.901306] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a9eb25a2b6274d64a151194086c6b238 [ 1707.903676] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1707.904148] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f6874a45-fa59-4fc4-8888-7917695c4321 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.924526] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1707.924526] env[62740]: value = "task-640314" [ 1707.924526] env[62740]: _type = "Task" [ 1707.924526] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1707.932439] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640314, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.219436] env[62740]: DEBUG nova.network.neutron [req-0ba25624-50f7-4842-848e-07e2f49b708f req-584d2f21-fd31-4a53-af76-af0fb31ce425 service nova] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Updated VIF entry in instance network info cache for port fce6b1f4-0806-4ee4-9cb6-ffeb4921c402. {{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1708.219791] env[62740]: DEBUG nova.network.neutron [req-0ba25624-50f7-4842-848e-07e2f49b708f req-584d2f21-fd31-4a53-af76-af0fb31ce425 service nova] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Updating instance_info_cache with network_info: [{"id": "fce6b1f4-0806-4ee4-9cb6-ffeb4921c402", "address": "fa:16:3e:4b:ad:8a", "network": {"id": "911124ba-af71-4345-a712-1d9f1b0ec94d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1164437049-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "960954b067f841cf9dff2016571551bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfce6b1f4-08", "ovs_interfaceid": "fce6b1f4-0806-4ee4-9cb6-ffeb4921c402", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1708.220385] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-0ba25624-50f7-4842-848e-07e2f49b708f req-584d2f21-fd31-4a53-af76-af0fb31ce425 service nova] Expecting reply to msg f433e9ab9834427abc90241e53b8d91e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1708.232962] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
f433e9ab9834427abc90241e53b8d91e [ 1708.233626] env[62740]: DEBUG oslo_concurrency.lockutils [req-0ba25624-50f7-4842-848e-07e2f49b708f req-584d2f21-fd31-4a53-af76-af0fb31ce425 service nova] Releasing lock "refresh_cache-1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1708.434525] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640314, 'name': CreateVM_Task, 'duration_secs': 0.300447} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1708.434713] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1708.435408] env[62740]: DEBUG oslo_concurrency.lockutils [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1708.435583] env[62740]: DEBUG oslo_concurrency.lockutils [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1708.435923] env[62740]: DEBUG oslo_concurrency.lockutils [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1708.436193] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b74427d6-eefb-456c-bd9c-bce33fbd3524 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.440637] env[62740]: DEBUG oslo_vmware.api [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Waiting for the task: (returnval){ [ 1708.440637] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5253e208-6795-c1d5-87c5-52223d8a5f80" [ 1708.440637] env[62740]: _type = "Task" [ 1708.440637] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1708.447941] env[62740]: DEBUG oslo_vmware.api [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5253e208-6795-c1d5-87c5-52223d8a5f80, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.950658] env[62740]: DEBUG oslo_concurrency.lockutils [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1708.951033] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1708.951163] env[62740]: DEBUG oslo_concurrency.lockutils [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1711.005074] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquiring lock "4ee71d81-7d8b-42f8-a27c-b4645169fa3e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1711.005074] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "4ee71d81-7d8b-42f8-a27c-b4645169fa3e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1730.185770] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg d2990048786d4153b8f1fdde6955664d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1730.197887] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d2990048786d4153b8f1fdde6955664d [ 1736.892374] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1737.891256] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1738.890636] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1738.890910] env[62740]: DEBUG nova.compute.manager [None 
req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Cleaning up deleted instances {{(pid=62740) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11217}} [ 1738.891514] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg f3eaadfda5044810aca56f8219a8c350 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1738.900561] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f3eaadfda5044810aca56f8219a8c350 [ 1738.901179] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] There are 0 instances to clean {{(pid=62740) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11226}} [ 1739.890624] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1739.892229] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1739.892229] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Starting heal instance info cache {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 1739.892229] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Rebuilding the list of instances to heal {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 1739.892229] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 51cfcfdc10104f8d82148738b8e250c5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1739.910966] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 51cfcfdc10104f8d82148738b8e250c5 [ 1739.913325] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1739.913477] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1739.913611] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1739.913738] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Skipping network cache update for instance because it is Building. 
{{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1739.913861] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1739.913985] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1739.914121] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1739.914242] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1739.914363] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1739.914502] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1739.914598] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Didn't find any instances for network info cache update. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 1739.915330] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1739.915330] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62740) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 1739.915470] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1739.915576] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Cleaning up deleted instances with incomplete migration {{(pid=62740) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11255}} [ 1739.915863] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 4b5771df2f894bb6aa5ee244d18dbec1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1739.927567] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4b5771df2f894bb6aa5ee244d18dbec1 [ 1741.903460] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1741.903808] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager.update_available_resource {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1741.904186] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 40dab712a1e74b84981179da0cfa1133 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1741.915075] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 40dab712a1e74b84981179da0cfa1133 [ 1741.916162] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1741.916381] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1741.916555] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1741.916718] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62740) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1741.917811] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2dc299e-4bc8-42bf-ba39-66cd7fb32326 {{(pid=62740) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.926884] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cf91bde-c416-423c-aa8d-6c160dcf2f38 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.940567] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2698b62-d6e6-4d40-94c9-bae791758f60 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.946510] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e962f0ac-1370-42d6-b9f1-7e09995a3f29 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.974565] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181682MB free_disk=90GB free_vcpus=48 pci_devices=None {{(pid=62740) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1741.974711] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1741.974905] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1741.975798] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg ee2496b8aaa34ff1ade723edb9af7f45 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1742.012037] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ee2496b8aaa34ff1ade723edb9af7f45 [ 1742.016698] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 1da596b80db94a76865de8c73447af5d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1742.025626] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1da596b80db94a76865de8c73447af5d [ 1742.135686] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 913ddb91-9d46-459e-8775-c9f380ed3cc4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1742.135903] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 07efd13e-40d0-4158-b17c-6f5c75474ce3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1742.136055] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 388d71f2-b229-4666-a53d-d5b07e498eed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1742.136195] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance c0daf074-eecb-4899-938f-477031efc6d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1742.136335] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 61fea037-aac3-47ef-aa6a-5dfa657d840d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1742.136488] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 86c57375-8328-4344-b228-2f1ce6efc71e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1742.136660] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance a41506d2-33b2-40b8-badb-41312c7abbd2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1742.136803] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 3aa2858e-d422-408a-a83a-98382f971add actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1742.136924] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1742.137055] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1742.137636] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 8ad611445eb24a7d947fb50773eda760 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1742.148312] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8ad611445eb24a7d947fb50773eda760 [ 1742.149193] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 9a595940-16ba-401a-922f-331cf87093c9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1742.149704] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg bc68fb085e0b4f0995ee0af5dbc4373f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1742.165168] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc68fb085e0b4f0995ee0af5dbc4373f [ 1742.165965] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance d2fb90b7-1618-4f07-8854-81566887a7cd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1742.166160] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 5b14dd40854b4f35a09d03f2defa9ace in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1742.175852] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5b14dd40854b4f35a09d03f2defa9ace [ 1742.176583] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 08197ee6-55de-40f8-8704-641c0614cad6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1742.177100] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 4304fe98760a4de98272ee84a6a053d9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1742.191025] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4304fe98760a4de98272ee84a6a053d9 [ 1742.191457] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 19f27c16-45b3-47d8-acf0-18255844431f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1742.191939] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg e24725cef8d242c787d81e8587b91581 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1742.200897] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e24725cef8d242c787d81e8587b91581 [ 1742.201555] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 4ee71d81-7d8b-42f8-a27c-b4645169fa3e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1742.201966] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1742.201966] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1742.372373] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61fcc13c-6cd0-478d-ac54-d818b9964280 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.379824] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8089cb03-b6ba-45f8-b8a1-2a2ea530e173 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.408372] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59a6321c-7662-4ba4-ba07-303e57134187 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.414996] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f513f041-22ae-4b36-8c71-a0e0ed52081c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.427334] env[62740]: DEBUG nova.compute.provider_tree [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1742.427758] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 60184bf455c84ac0966ad3237d18a34e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1742.436037] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 60184bf455c84ac0966ad3237d18a34e [ 1742.436882] env[62740]: DEBUG nova.scheduler.client.report [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed for provider 
d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1742.439133] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg eb37a7f97bfb437eb0fbec1cc1be0092 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1742.449690] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eb37a7f97bfb437eb0fbec1cc1be0092 [ 1742.450312] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62740) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1742.450495] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.476s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1744.438373] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1744.891307] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1749.886580] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1749.887293] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 97df76f801b048a2ae90cf171327ff69 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1749.904486] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 97df76f801b048a2ae90cf171327ff69 [ 1751.470992] env[62740]: WARNING oslo_vmware.rw_handles [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1751.470992] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1751.470992] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1751.470992] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1751.470992] env[62740]: ERROR oslo_vmware.rw_handles File 
"/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1751.470992] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 1751.470992] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1751.470992] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1751.470992] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1751.470992] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1751.470992] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1751.470992] env[62740]: ERROR oslo_vmware.rw_handles [ 1751.470992] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/da42e156-3019-4bf0-a646-c4c1bdf3b20c/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1751.472804] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1751.473068] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Copying Virtual Disk [datastore2] vmware_temp/da42e156-3019-4bf0-a646-c4c1bdf3b20c/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore2] vmware_temp/da42e156-3019-4bf0-a646-c4c1bdf3b20c/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1751.473343] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-898d1352-6abd-40f1-b354-5d5cc73b2a61 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.482142] env[62740]: DEBUG oslo_vmware.api [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Waiting for the task: (returnval){ [ 1751.482142] env[62740]: value = "task-640315" [ 1751.482142] env[62740]: _type = "Task" [ 1751.482142] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1751.490344] env[62740]: DEBUG oslo_vmware.api [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Task: {'id': task-640315, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.991736] env[62740]: DEBUG oslo_vmware.exceptions [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Fault InvalidArgument not matched. {{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1751.992035] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1751.992631] env[62740]: ERROR nova.compute.manager [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1751.992631] env[62740]: Faults: ['InvalidArgument'] [ 1751.992631] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Traceback (most recent call last): [ 1751.992631] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1751.992631] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] yield resources [ 1751.992631] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1751.992631] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] self.driver.spawn(context, instance, image_meta, [ 1751.992631] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1751.992631] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1751.992631] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1751.992631] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] self._fetch_image_if_missing(context, vi) [ 1751.992631] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1751.992631] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] image_cache(vi, tmp_image_ds_loc) [ 1751.992631] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1751.992631] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] vm_util.copy_virtual_disk( [ 1751.992631] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1751.992631] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] session._wait_for_task(vmdk_copy_task) [ 1751.992631] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1751.992631] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] return self.wait_for_task(task_ref) [ 1751.992631] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1751.992631] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] return evt.wait() [ 1751.992631] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1751.992631] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] result = hub.switch() [ 1751.992631] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1751.992631] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] return self.greenlet.switch() [ 1751.992631] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1751.992631] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] self.f(*self.args, **self.kw) [ 1751.992631] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1751.992631] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] raise exceptions.translate_fault(task_info.error) [ 1751.992631] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1751.992631] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Faults: ['InvalidArgument'] [ 1751.992631] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] [ 1751.993770] env[62740]: INFO nova.compute.manager [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Terminating instance [ 1751.995250] env[62740]: DEBUG oslo_concurrency.lockutils [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1751.995431] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 
tempest-ImagesTestJSON-592126557-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1751.996064] env[62740]: DEBUG nova.compute.manager [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1751.996259] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1751.996496] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f41f796d-e3fe-4810-9867-5aee6f482c85 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.998958] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bacd29d2-cb57-4cc3-a854-44dcf09953c0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.005364] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1752.005574] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-57e35d42-355d-40b6-9a3a-bf1c7cd5b798 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.007641] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1752.007834] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1752.008778] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f49c6c6-5ebf-41aa-b85e-bb2df7306d79 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.013348] env[62740]: DEBUG oslo_vmware.api [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Waiting for the task: (returnval){ [ 1752.013348] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5266c395-aba7-15f5-91ea-44c2d28311e7" [ 1752.013348] env[62740]: _type = "Task" [ 1752.013348] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.024129] env[62740]: DEBUG oslo_vmware.api [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5266c395-aba7-15f5-91ea-44c2d28311e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.071711] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1752.071920] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1752.072115] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Deleting the datastore file [datastore2] 913ddb91-9d46-459e-8775-c9f380ed3cc4 {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1752.072370] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eb273b2c-342f-432c-8840-217aede46d72 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.079022] env[62740]: DEBUG oslo_vmware.api [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Waiting for the task: (returnval){ [ 1752.079022] env[62740]: value = "task-640317" [ 1752.079022] env[62740]: _type = "Task" [ 1752.079022] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.086107] env[62740]: DEBUG oslo_vmware.api [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Task: {'id': task-640317, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.524056] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1752.524410] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Creating directory with path [datastore2] vmware_temp/5ab397cb-080a-4698-8a98-4d779569f052/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1752.524546] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-de1b1033-ade0-44b6-8d41-158cc0ab2a21 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.535229] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Created directory with path [datastore2] vmware_temp/5ab397cb-080a-4698-8a98-4d779569f052/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1752.535416] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Fetch image to [datastore2] vmware_temp/5ab397cb-080a-4698-8a98-4d779569f052/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1752.535592] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/5ab397cb-080a-4698-8a98-4d779569f052/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1752.536314] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96807588-dcd2-41fd-a09d-75509ae83d5f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.542775] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20393379-87fc-4b30-9637-351305f14d6e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.551663] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c91d0059-73cf-4be1-af7d-e146729ea5a2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.584522] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47ecf5b8-4e46-443d-8b52-1aff8a16999a {{(pid=62740) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.591241] env[62740]: DEBUG oslo_vmware.api [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Task: {'id': task-640317, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075128} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1752.592624] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1752.592815] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1752.592992] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1752.593184] env[62740]: INFO nova.compute.manager [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1752.594918] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-98d65839-25a0-4d28-86b6-2aac73f6e31e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.596746] env[62740]: DEBUG nova.compute.claims [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1752.596919] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1752.597145] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1752.599056] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg 09ceb3131a8b4d3a9e6c45e7cf366b8d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1752.617546] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1752.638867] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 09ceb3131a8b4d3a9e6c45e7cf366b8d [ 1752.670755] env[62740]: DEBUG oslo_vmware.rw_handles [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5ab397cb-080a-4698-8a98-4d779569f052/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1752.735400] env[62740]: DEBUG oslo_vmware.rw_handles [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Completed reading data from the image iterator. 
{{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1752.735595] env[62740]: DEBUG oslo_vmware.rw_handles [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5ab397cb-080a-4698-8a98-4d779569f052/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1752.861762] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cc1a133-97bf-4de5-b5a2-be8575a75d5d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.869268] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68e8c3ae-b97d-4c92-9f3c-0dfdc2bd6daa {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.897803] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1752.898154] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg f2cc8285eaf94cf0952e713c91431cea in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1752.899569] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6500868a-8c04-4288-843c-15ffccce40c8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.906010] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f2cc8285eaf94cf0952e713c91431cea [ 1752.907334] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-906da144-bdfd-4c48-ae94-1834d8db8666 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.920577] env[62740]: DEBUG nova.compute.provider_tree [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1752.921055] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg 366aa0b758ba407fbeaa4a3f5988a399 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1752.927695] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 366aa0b758ba407fbeaa4a3f5988a399 [ 1752.928570] env[62740]: DEBUG nova.scheduler.client.report [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1752.930855] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg faacd73674274df4932e52677a812226 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1752.942269] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg faacd73674274df4932e52677a812226 [ 1752.942990] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.346s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1752.943515] env[62740]: ERROR nova.compute.manager [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1752.943515] env[62740]: Faults: ['InvalidArgument'] [ 1752.943515] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Traceback (most recent call last): [ 1752.943515] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1752.943515] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] self.driver.spawn(context, instance, image_meta, [ 1752.943515] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1752.943515] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1752.943515] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1752.943515] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] self._fetch_image_if_missing(context, vi) [ 1752.943515] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1752.943515] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] image_cache(vi, tmp_image_ds_loc) [ 1752.943515] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1752.943515] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] 
vm_util.copy_virtual_disk( [ 1752.943515] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1752.943515] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] session._wait_for_task(vmdk_copy_task) [ 1752.943515] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1752.943515] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] return self.wait_for_task(task_ref) [ 1752.943515] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1752.943515] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] return evt.wait() [ 1752.943515] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1752.943515] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] result = hub.switch() [ 1752.943515] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1752.943515] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] return self.greenlet.switch() [ 1752.943515] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1752.943515] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] self.f(*self.args, **self.kw) [ 1752.943515] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1752.943515] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] raise exceptions.translate_fault(task_info.error) [ 1752.943515] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1752.943515] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Faults: ['InvalidArgument'] [ 1752.943515] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] [ 1752.944483] env[62740]: DEBUG nova.compute.utils [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1752.945639] env[62740]: DEBUG nova.compute.manager [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Build of instance 913ddb91-9d46-459e-8775-c9f380ed3cc4 was re-scheduled: A specified parameter was not correct: fileType [ 1752.945639] 
env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1752.946202] env[62740]: DEBUG nova.compute.manager [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1752.946202] env[62740]: DEBUG nova.compute.manager [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1752.946330] env[62740]: DEBUG nova.compute.manager [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1752.946493] env[62740]: DEBUG nova.network.neutron [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1753.062699] env[62740]: DEBUG neutronclient.v2_0.client [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=62740) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1753.064281] env[62740]: ERROR nova.compute.manager [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. 
[ 1753.064281] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Traceback (most recent call last):
[ 1753.064281] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1753.064281] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] self.driver.spawn(context, instance, image_meta,
[ 1753.064281] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1753.064281] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1753.064281] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1753.064281] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] self._fetch_image_if_missing(context, vi)
[ 1753.064281] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1753.064281] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] image_cache(vi, tmp_image_ds_loc)
[ 1753.064281] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1753.064281] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] vm_util.copy_virtual_disk(
[ 1753.064281] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1753.064281] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] session._wait_for_task(vmdk_copy_task)
[ 1753.064281] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1753.064281] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] return self.wait_for_task(task_ref)
[ 1753.064281] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1753.064281] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] return evt.wait()
[ 1753.064281] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1753.064281] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] result = hub.switch()
[ 1753.064281] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1753.064281] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] return self.greenlet.switch()
[ 1753.064281] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1753.064281] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] self.f(*self.args, **self.kw)
[ 1753.064281] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1753.064281] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] raise exceptions.translate_fault(task_info.error)
[ 1753.064281] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1753.064281] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Faults: ['InvalidArgument']
[ 1753.064281] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4]
[ 1753.064281] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] During handling of the above exception, another exception occurred:
[ 1753.064281] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4]
[ 1753.064281] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Traceback (most recent call last):
[ 1753.064281] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/compute/manager.py", line 2447, in _do_build_and_run_instance
[ 1753.064281] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] self._build_and_run_instance(context, instance, image,
[ 1753.064281] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/compute/manager.py", line 2739, in _build_and_run_instance
[ 1753.064281] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] raise exception.RescheduledException(
[ 1753.064281] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] nova.exception.RescheduledException: Build of instance 913ddb91-9d46-459e-8775-c9f380ed3cc4 was re-scheduled: A specified parameter was not correct: fileType
[ 1753.064281] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Faults: ['InvalidArgument']
[ 1753.064281] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4]
[ 1753.065564] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] During handling of the above exception, another exception occurred:
[ 1753.065564] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4]
[ 1753.065564] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Traceback (most recent call last):
[ 1753.065564] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1753.065564] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] ret = obj(*args, **kwargs)
[ 1753.065564] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response
[ 1753.065564] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] exception_handler_v20(status_code, error_body)
[ 1753.065564] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20
[ 1753.065564] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] raise client_exc(message=error_message,
[ 1753.065564] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}}
[ 1753.065564] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Neutron server returns request_ids: ['req-853c5139-94d6-432c-8f18-85e167083923']
[ 1753.065564] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4]
[ 1753.065564] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] During handling of the above exception, another exception occurred:
[ 1753.065564] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4]
[ 1753.065564] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Traceback (most recent call last):
[ 1753.065564] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/compute/manager.py", line 3036, in _cleanup_allocated_networks
[ 1753.065564] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] self._deallocate_network(context, instance, requested_networks)
[ 1753.065564] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network
[ 1753.065564] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] self.network_api.deallocate_for_instance(
[ 1753.065564] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance
[ 1753.065564] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] data = neutron.list_ports(**search_opts)
[ 1753.065564] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1753.065564] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] ret = obj(*args, **kwargs)
[ 1753.065564] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports
[ 1753.065564] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] return self.list('ports', self.ports_path, retrieve_all,
[ 1753.065564] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1753.065564] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] ret = obj(*args, **kwargs)
[ 1753.065564] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list
[ 1753.065564] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] for r in self._pagination(collection, path, **params):
[ 1753.065564] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination
[ 1753.065564] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] res = self.get(path, params=params)
[ 1753.065564] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1753.065564] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] ret = obj(*args, **kwargs)
[ 1753.065564] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get
[ 1753.065564] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] return self.retry_request("GET", action, body=body,
[ 1753.065564] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1753.065564] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] ret = obj(*args, **kwargs)
[ 1753.065564] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request
[ 1753.065564] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] return self.do_request(method, action, body=body,
[ 1753.067333] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1753.067333] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] ret = obj(*args, **kwargs)
[ 1753.067333] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request
[ 1753.067333] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] self._handle_fault_response(status_code, replybody, resp)
[ 1753.067333] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper
[ 1753.067333] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] raise exception.Unauthorized()
[ 1753.067333] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] nova.exception.Unauthorized: Not authorized.
[ 1753.067333] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4]
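Every neutronclient call in the chain above passes through the same wrapper frame (nova/network/neutron.py line 196 in this tree), which catches neutronclient's Unauthorized and re-raises it as a nova exception (line 204); that is why the chain ends in "nova.exception.Unauthorized: Not authorized." rather than the raw 401. A minimal sketch of that translation pattern, with hypothetical names rather than nova's actual implementation:

    import functools

    from neutronclient.common import exceptions as neutron_exc


    class NotAuthorized(Exception):
        """Stand-in for nova.exception.Unauthorized; illustrative only."""


    def translate_neutron_errors(func):
        """Convert neutronclient errors into application-level ones.

        Mirrors the repeated 'wrapper' frames in the traceback above.
        """
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except neutron_exc.Unauthorized:
                # A 401 from Neutron surfaces to the caller as the
                # application's own exception, as seen in the log.
                raise NotAuthorized()
        return wrapper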
[ 1753.067333] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg 8aa35e1df8f6441583ad1e00db4fdeb8 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1753.094311] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8aa35e1df8f6441583ad1e00db4fdeb8
[ 1753.117892] env[62740]: INFO nova.scheduler.client.report [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Deleted allocations for instance 913ddb91-9d46-459e-8775-c9f380ed3cc4
[ 1753.123623] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg 842a9f8c5e354b82b35ce23679580259 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1753.137803] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 842a9f8c5e354b82b35ce23679580259
[ 1753.138351] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7ae09f43-f9b0-44bc-b2d1-d98085b108f4 tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Lock "913ddb91-9d46-459e-8775-c9f380ed3cc4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 534.055s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1753.138922] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 416bd9fd5328451c9a5dbbe6dd585bdf in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1753.139678] env[62740]: DEBUG oslo_concurrency.lockutils [None req-b10f5b87-ac6d-48a3-a6a0-64add319666c tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Lock "913ddb91-9d46-459e-8775-c9f380ed3cc4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 338.227s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1753.139886] env[62740]: DEBUG oslo_concurrency.lockutils [None req-b10f5b87-ac6d-48a3-a6a0-64add319666c tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Acquiring lock "913ddb91-9d46-459e-8775-c9f380ed3cc4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1753.140134] env[62740]: DEBUG oslo_concurrency.lockutils [None req-b10f5b87-ac6d-48a3-a6a0-64add319666c tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Lock "913ddb91-9d46-459e-8775-c9f380ed3cc4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1753.140306] env[62740]: DEBUG oslo_concurrency.lockutils [None req-b10f5b87-ac6d-48a3-a6a0-64add319666c tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Lock "913ddb91-9d46-459e-8775-c9f380ed3cc4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1753.142213] env[62740]: INFO nova.compute.manager [None req-b10f5b87-ac6d-48a3-a6a0-64add319666c tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Terminating instance
[ 1753.143898] env[62740]: DEBUG nova.compute.manager [None req-b10f5b87-ac6d-48a3-a6a0-64add319666c tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 1753.144099] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-b10f5b87-ac6d-48a3-a6a0-64add319666c tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1753.144557] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1187fc74-ea87-467a-9d0e-2eb1ebfcb7ab {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1753.150009] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 416bd9fd5328451c9a5dbbe6dd585bdf
[ 1753.150442] env[62740]: DEBUG nova.compute.manager [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 1753.152050] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 6680a073b7f743578ca28934c8b84867 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1753.157034] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e52632c-7a00-4cae-9bf1-54b5faf3fd1d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1753.184131] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6680a073b7f743578ca28934c8b84867
[ 1753.184783] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-b10f5b87-ac6d-48a3-a6a0-64add319666c tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 913ddb91-9d46-459e-8775-c9f380ed3cc4 could not be found.
[ 1753.184783] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-b10f5b87-ac6d-48a3-a6a0-64add319666c tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1753.184991] env[62740]: INFO nova.compute.manager [None req-b10f5b87-ac6d-48a3-a6a0-64add319666c tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 1753.185263] env[62740]: DEBUG oslo.service.loopingcall [None req-b10f5b87-ac6d-48a3-a6a0-64add319666c tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1753.187556] env[62740]: DEBUG nova.compute.manager [-] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 1753.187618] env[62740]: DEBUG nova.network.neutron [-] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1753.201101] env[62740]: DEBUG oslo_concurrency.lockutils [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1753.201358] env[62740]: DEBUG oslo_concurrency.lockutils [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1753.203030] env[62740]: INFO nova.compute.claims [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1753.204579] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 4aec1e1488d94772a19972b1caa429de in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1753.240156] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4aec1e1488d94772a19972b1caa429de
[ 1753.242020] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 6422c70d7b584b8e8edc26873520002f in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1753.250030] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6422c70d7b584b8e8edc26873520002f
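The lockutils records above show the serialization that frames this sequence: the terminate request waited 338.227s for the per-instance lock that the failed build held for 534.055s, and the resource tracker takes a separate "compute_resources" lock for the new claim. A minimal sketch of the oslo.concurrency primitives that produce these messages (function bodies here are placeholders):

    from oslo_concurrency import lockutils

    # One lock per instance UUID serializes build and terminate, as with
    # "913ddb91-9d46-459e-8775-c9f380ed3cc4" above; the library logs how
    # long a waiter blocked and how long the holder kept the lock.
    @lockutils.synchronized('913ddb91-9d46-459e-8775-c9f380ed3cc4')
    def do_terminate_instance():
        pass  # placeholder for the real teardown work

    # The same primitive is available as a context manager, e.g. around a
    # resource claim like the "compute_resources" lock in the log.
    with lockutils.lock('compute_resources'):
        pass  # placeholder for claim bookkeeping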
[ 1753.285925] env[62740]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=62740) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}}
[ 1753.286202] env[62740]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}}
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall Traceback (most recent call last):
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs)
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body)
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall raise client_exc(message=error_message,
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}}
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-a2fd6fab-259d-459b-9a34-61e6c8057183']
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred:
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall Traceback (most recent call last):
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw)
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall result = f(*args, **kwargs)
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall self._deallocate_network(
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance(
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts)
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs)
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all,
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs)
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params):
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall res = self.get(path, params=params)
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs)
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body,
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs)
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body,
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs)
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp)
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid()
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
[ 1753.286921] env[62740]: ERROR oslo.service.loopingcall
[ 1753.288493] env[62740]: ERROR nova.compute.manager [None req-b10f5b87-ac6d-48a3-a6a0-64add319666c tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
[ 1753.289316] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-b10f5b87-ac6d-48a3-a6a0-64add319666c tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg 4c324d452c404be3955c2569aa90c94e in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1753.320676] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4c324d452c404be3955c2569aa90c94e
[ 1753.322428] env[62740]: ERROR nova.compute.manager [None req-b10f5b87-ac6d-48a3-a6a0-64add319666c tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
[ 1753.322428] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Traceback (most recent call last):
[ 1753.322428] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1753.322428] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] ret = obj(*args, **kwargs)
[ 1753.322428] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response
[ 1753.322428] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] exception_handler_v20(status_code, error_body)
[ 1753.322428] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20
[ 1753.322428] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] raise client_exc(message=error_message,
[ 1753.322428] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}}
[ 1753.322428] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Neutron server returns request_ids: ['req-a2fd6fab-259d-459b-9a34-61e6c8057183']
[ 1753.322428] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4]
[ 1753.322428] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] During handling of the above exception, another exception occurred:
[ 1753.322428] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4]
[ 1753.322428] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Traceback (most recent call last):
[ 1753.322428] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance
[ 1753.322428] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] self._delete_instance(context, instance, bdms)
[ 1753.322428] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance
[ 1753.322428] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] self._shutdown_instance(context, instance, bdms)
[ 1753.322428] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance
[ 1753.322428] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] self._try_deallocate_network(context, instance, requested_networks)
[ 1753.322428] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network
[ 1753.322428] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] with excutils.save_and_reraise_exception():
[ 1753.322428] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1753.322428] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] self.force_reraise()
[ 1753.322428] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1753.322428] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] raise self.value
[ 1753.322428] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network
[ 1753.322428] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] _deallocate_network_with_retries()
[ 1753.322428] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func
[ 1753.322428] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] return evt.wait()
[ 1753.322428] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1753.322428] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] result = hub.switch()
[ 1753.322428] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1753.322428] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] return self.greenlet.switch()
[ 1753.322428] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop
[ 1753.322428] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] result = func(*self.args, **self.kw)
[ 1753.322428] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func
[ 1753.323701] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] result = f(*args, **kwargs)
[ 1753.323701] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries
[ 1753.323701] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] self._deallocate_network(
[ 1753.323701] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network
[ 1753.323701] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] self.network_api.deallocate_for_instance(
[ 1753.323701] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance
[ 1753.323701] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] data = neutron.list_ports(**search_opts)
[ 1753.323701] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1753.323701] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] ret = obj(*args, **kwargs)
[ 1753.323701] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports
[ 1753.323701] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] return self.list('ports', self.ports_path, retrieve_all,
[ 1753.323701] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1753.323701] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] ret = obj(*args, **kwargs)
[ 1753.323701] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list
[ 1753.323701] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] for r in self._pagination(collection, path, **params):
[ 1753.323701] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination
[ 1753.323701] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] res = self.get(path, params=params)
[ 1753.323701] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1753.323701] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] ret = obj(*args, **kwargs)
[ 1753.323701] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get
[ 1753.323701] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] return self.retry_request("GET", action, body=body,
[ 1753.323701] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1753.323701] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] ret = obj(*args, **kwargs)
[ 1753.323701] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request
[ 1753.323701] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] return self.do_request(method, action, body=body,
[ 1753.323701] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1753.323701] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] ret = obj(*args, **kwargs)
[ 1753.323701] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request
[ 1753.323701] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] self._handle_fault_response(status_code, replybody, resp)
[ 1753.323701] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper
[ 1753.323701] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] raise exception.NeutronAdminCredentialConfigurationInvalid()
[ 1753.323701] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
[ 1753.323701] env[62740]: ERROR nova.compute.manager [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4]
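Both failures above reduce to the same root cause, which the earlier record states directly: the Neutron client could not obtain a valid admin token, so the service credentials in the [neutron] section of nova.conf need checking. One way to test those credentials outside nova is to issue the same list_ports call the tracebacks show; a minimal sketch, where the endpoint and credential values are placeholders to be replaced with the settings from nova.conf (none of them come from this log):

    from keystoneauth1.identity import v3
    from keystoneauth1 import session
    from neutronclient.v2_0 import client

    # Placeholder credentials; substitute the values configured in the
    # [neutron] section of nova.conf.
    auth = v3.Password(
        auth_url='http://controller.example:5000/v3',
        username='nova',
        password='secret',
        project_name='service',
        user_domain_id='default',
        project_domain_id='default',
    )
    sess = session.Session(auth=auth)
    neutron = client.Client(session=sess)

    # The exact call that returned 401 in the tracebacks above; with
    # working credentials it returns the ports bound to the instance.
    print(neutron.list_ports(device_id='913ddb91-9d46-459e-8775-c9f380ed3cc4'))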
[ 1753.325483] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-b10f5b87-ac6d-48a3-a6a0-64add319666c tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg 1a05b3a66b5a4ed5b100349bbeb491b8 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1753.350550] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1a05b3a66b5a4ed5b100349bbeb491b8
[ 1753.351672] env[62740]: DEBUG oslo_concurrency.lockutils [None req-b10f5b87-ac6d-48a3-a6a0-64add319666c tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Lock "913ddb91-9d46-459e-8775-c9f380ed3cc4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.212s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1753.352329] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-b10f5b87-ac6d-48a3-a6a0-64add319666c tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg 310cc80576a14969aa57bf2e9544afc6 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1753.361305] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 310cc80576a14969aa57bf2e9544afc6
[ 1753.362557] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-b10f5b87-ac6d-48a3-a6a0-64add319666c tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg a1967c73e9124d1390965c0b664abf81 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1753.381214] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a1967c73e9124d1390965c0b664abf81
[ 1753.383043] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-b10f5b87-ac6d-48a3-a6a0-64add319666c tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Expecting reply to msg 7532f49db78246e2b49d5c0df25d9b43 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 1753.411557] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7532f49db78246e2b49d5c0df25d9b43
[ 1753.412967] env[62740]: INFO nova.compute.manager [None req-b10f5b87-ac6d-48a3-a6a0-64add319666c tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] [instance: 913ddb91-9d46-459e-8775-c9f380ed3cc4] Successfully reverted task state from None on failure for instance.
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server [None req-b10f5b87-ac6d-48a3-a6a0-64add319666c tempest-DeleteServersAdminTestJSON-1607895434 tempest-DeleteServersAdminTestJSON-1607895434-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server Traceback (most recent call last):
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body)
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message,
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}}
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-a2fd6fab-259d-459b-9a34-61e6c8057183']
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred:
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server Traceback (most recent call last):
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message)
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args)
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args)
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception():
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server self.force_reraise()
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server raise self.value
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw)
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception():
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server self.force_reraise()
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server raise self.value
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs)
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs)
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception():
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server self.force_reraise()
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server raise self.value
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs)
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in terminate_instance
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms)
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs)
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3339, in do_terminate_instance
[ 1753.417041] env[62740]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception():
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server self.force_reraise()
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server raise self.value
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms)
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms)
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks)
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception():
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server self.force_reraise()
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server raise self.value
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries()
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server return evt.wait()
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server result = hub.switch()
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server return self.greenlet.switch()
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw)
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs)
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server self._deallocate_network(
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance(
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts)
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all,
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params):
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params)
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body,
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body,
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1753.418799] env[62740]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1753.420601] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request
[ 1753.420601] env[62740]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp)
[ 1753.420601] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper
[ 1753.420601] env[62740]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid()
[ 1753.420601] env[62740]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
[ 1753.420601] env[62740]: ERROR oslo_messaging.rpc.server
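The outer frames of this trace show the oslo.messaging side of the failure: _process_incoming hands the message to the dispatcher, which resolves terminate_instance on a registered endpoint, and whatever the method raises is what gets logged as "Exception during message handling". A minimal sketch of that server shape, assuming a transport_url is already present in cfg.CONF; the topic, server, and endpoint names here are illustrative, not nova's:

    import oslo_messaging
    from oslo_config import cfg

    class ComputeEndpoint(object):
        # Dispatched by method name, as terminate_instance is in the
        # traceback above; raising here reproduces "Exception during
        # message handling" on the server side.
        def terminate_instance(self, ctxt, instance_uuid):
            raise RuntimeError('simulated failure')

    transport = oslo_messaging.get_rpc_transport(cfg.CONF)
    target = oslo_messaging.Target(topic='compute', server='devstack1')
    server = oslo_messaging.get_rpc_server(transport, target,
                                           [ComputeEndpoint()])
    server.start()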
[ 1753.420601] env[62740]: ERROR oslo_messaging.rpc.server [ 1753.440985] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7037801c-ecdf-4011-962e-617cac2db8c0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.448302] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4fe44b6-cb5d-413c-8881-dd1f9f206d69 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.480879] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-398752b7-6619-4a1e-b471-e7d5334d54ab {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.488767] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-526a7840-6a5e-406b-9dd2-83f119014f9b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.502613] env[62740]: DEBUG nova.compute.provider_tree [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1753.503011] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg b03db16f5c8445b4b690c75bc63e47f9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1753.512563] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b03db16f5c8445b4b690c75bc63e47f9 [ 1753.513467] env[62740]: DEBUG nova.scheduler.client.report [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1753.515630] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 3db06f59d1744527ac902a073064fd92 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1753.526536] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3db06f59d1744527ac902a073064fd92 [ 1753.527326] env[62740]: DEBUG oslo_concurrency.lockutils [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.326s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
1753.527862] env[62740]: DEBUG nova.compute.manager [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Start building networks asynchronously for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1753.529507] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 8cdd8b97df3a4d3882cf31fa55a42ba4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1753.562371] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8cdd8b97df3a4d3882cf31fa55a42ba4 [ 1753.563832] env[62740]: DEBUG nova.compute.utils [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1753.564457] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 1c2fdbc27c18425ca715fc80f8fdb060 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1753.565932] env[62740]: DEBUG nova.compute.manager [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1753.566203] env[62740]: DEBUG nova.network.neutron [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1753.575840] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1c2fdbc27c18425ca715fc80f8fdb060 [ 1753.576408] env[62740]: DEBUG nova.compute.manager [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Start building block device mappings for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}}
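The inventory payload logged a few entries above is what Placement uses to bound scheduling on this node: usable capacity per resource class is (total - reserved) * allocation_ratio. A minimal, runnable sketch of that arithmetic, with the dict transcribed from the log and the loop purely illustrative:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        # Placement's usable-capacity formula: (total - reserved) * allocation_ratio
        usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: {usable:g} consumable units")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400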
[ 1753.578044] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 10b0c00760394f39bda74bd786dee254 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1753.607821] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 10b0c00760394f39bda74bd786dee254 [ 1753.610427] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 32a28ee8dedf481485ed9cc5eb22cef8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1753.631056] env[62740]: DEBUG nova.policy [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd3f5aaf4abae42da9a5ad7044f84647d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1d3897fd0522431c87c8830678fd59ae', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 1753.642890] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 32a28ee8dedf481485ed9cc5eb22cef8 [ 1753.644022] env[62740]: DEBUG nova.compute.manager [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Start spawning the instance on the hypervisor. {{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}}
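The nova.policy entry above is a routine oslo.policy authorization that evaluated to False for a plain member token; for non-admin tempest credentials that is expected and does not fail the boot. A hedged sketch of the same kind of check; the rule string below is an assumption for illustration, not necessarily Nova's shipped default:

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(policy.RuleDefault(
        'network:attach_external_network', 'is_admin:True'))  # assumed rule string

    creds = {'is_admin': False, 'roles': ['reader', 'member'],
             'project_id': '1d3897fd0522431c87c8830678fd59ae'}
    allowed = enforcer.authorize('network:attach_external_network',
                                 target={}, creds=creds, do_raise=False)
    print(allowed)  # False, matching the "Policy check ... failed" entry above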
[ 1753.670474] env[62740]: DEBUG nova.virt.hardware [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1753.670720] env[62740]: DEBUG nova.virt.hardware [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1753.670880] env[62740]: DEBUG nova.virt.hardware [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1753.671076] env[62740]: DEBUG nova.virt.hardware [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1753.671227] env[62740]: DEBUG nova.virt.hardware [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1753.671375] env[62740]: DEBUG nova.virt.hardware [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1753.671598] env[62740]: DEBUG nova.virt.hardware [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1753.671763] env[62740]: DEBUG nova.virt.hardware [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1753.671945] env[62740]: DEBUG nova.virt.hardware [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
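The hardware.py lines above amount to a small search: with no flavor or image preferences set, every (sockets, cores, threads) factoring of the vCPU count within the 65536 maxima is a candidate, and for a single vCPU only 1:1:1 exists. A simplified, runnable illustration of that enumeration (the shape of the algorithm, not Nova's actual code):

    import itertools

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Enumerate factorings where sockets * cores * threads == vcpus,
        # each dimension capped by its maximum.
        for s, c, t in itertools.product(
                range(1, min(vcpus, max_sockets) + 1),
                range(1, min(vcpus, max_cores) + 1),
                range(1, min(vcpus, max_threads) + 1)):
            if s * c * t == vcpus:
                yield (s, c, t)

    print(list(possible_topologies(1)))  # [(1, 1, 1)] -> the topology chosen above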
[ 1753.672222] env[62740]: DEBUG nova.virt.hardware [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1753.672428] env[62740]: DEBUG nova.virt.hardware [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1753.673492] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76b34ef9-533c-4ab8-bac7-de33d75a7921 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.681534] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c76770fa-4b9e-49b9-aa3e-132989382768 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.930260] env[62740]: DEBUG nova.network.neutron [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Successfully created port: 3ff4c436-59c0-4ea6-b33d-4a702aa5986c {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1754.611074] env[62740]: DEBUG nova.compute.manager [req-b9605b68-211b-4f29-865a-5e2b35ee9f97 req-8ad4f13d-bb13-4c97-9dec-f3a06863b64a service nova] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Received event network-vif-plugged-3ff4c436-59c0-4ea6-b33d-4a702aa5986c {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1754.611370] env[62740]: DEBUG oslo_concurrency.lockutils [req-b9605b68-211b-4f29-865a-5e2b35ee9f97 req-8ad4f13d-bb13-4c97-9dec-f3a06863b64a service nova] Acquiring lock "9a595940-16ba-401a-922f-331cf87093c9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1754.611579] env[62740]: DEBUG oslo_concurrency.lockutils [req-b9605b68-211b-4f29-865a-5e2b35ee9f97 req-8ad4f13d-bb13-4c97-9dec-f3a06863b64a service nova] Lock "9a595940-16ba-401a-922f-331cf87093c9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1754.611748] env[62740]: DEBUG oslo_concurrency.lockutils [req-b9605b68-211b-4f29-865a-5e2b35ee9f97 req-8ad4f13d-bb13-4c97-9dec-f3a06863b64a service nova] Lock "9a595940-16ba-401a-922f-331cf87093c9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1754.611920] env[62740]: DEBUG nova.compute.manager [req-b9605b68-211b-4f29-865a-5e2b35ee9f97 req-8ad4f13d-bb13-4c97-9dec-f3a06863b64a service nova] [instance: 9a595940-16ba-401a-922f-331cf87093c9] No waiting events found dispatching network-vif-plugged-3ff4c436-59c0-4ea6-b33d-4a702aa5986c {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
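The network-vif-plugged event above reached nova-compute through Nova's os-server-external-events API, posted by Neutron when the port went active; it simply arrived before the spawn path registered a waiter, hence the benign "Received unexpected event" warning that follows. A hedged sketch of how such an event is delivered (endpoint and token are placeholders, not values from this deployment):

    import requests

    NOVA_API = 'http://controller.example:8774/v2.1'  # placeholder endpoint
    TOKEN = '<keystone-token>'                        # placeholder credential

    body = {'events': [{
        'server_uuid': '9a595940-16ba-401a-922f-331cf87093c9',
        'name': 'network-vif-plugged',
        'tag': '3ff4c436-59c0-4ea6-b33d-4a702aa5986c',  # the Neutron port id
        'status': 'completed',
    }]}
    resp = requests.post(NOVA_API + '/os-server-external-events',
                         json=body, headers={'X-Auth-Token': TOKEN})
    print(resp.status_code)  # 200 when at least one event was accepted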
[ 1754.612191] env[62740]: WARNING nova.compute.manager [req-b9605b68-211b-4f29-865a-5e2b35ee9f97 req-8ad4f13d-bb13-4c97-9dec-f3a06863b64a service nova] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Received unexpected event network-vif-plugged-3ff4c436-59c0-4ea6-b33d-4a702aa5986c for instance with vm_state building and task_state spawning. [ 1754.651778] env[62740]: DEBUG nova.network.neutron [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Successfully updated port: 3ff4c436-59c0-4ea6-b33d-4a702aa5986c {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1754.652677] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 95f13a3709a741128f46a99ea21d028f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1754.668314] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 95f13a3709a741128f46a99ea21d028f [ 1754.668448] env[62740]: DEBUG oslo_concurrency.lockutils [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Acquiring lock "refresh_cache-9a595940-16ba-401a-922f-331cf87093c9" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1754.668580] env[62740]: DEBUG oslo_concurrency.lockutils [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Acquired lock "refresh_cache-9a595940-16ba-401a-922f-331cf87093c9" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1754.668798] env[62740]: DEBUG nova.network.neutron [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1754.669201] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg e036b492002840ce894a6f2da90a1418 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1754.676599] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e036b492002840ce894a6f2da90a1418 [ 1754.734867] env[62740]: DEBUG nova.network.neutron [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1754.918718] env[62740]: DEBUG nova.network.neutron [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Updating instance_info_cache with network_info: [{"id": "3ff4c436-59c0-4ea6-b33d-4a702aa5986c", "address": "fa:16:3e:e4:30:e0", "network": {"id": "fe48b87d-d6bf-41e3-8587-388615fdb42f", "bridge": "br-int", "label": "tempest-ServersTestJSON-590567929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d3897fd0522431c87c8830678fd59ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ff4c436-59", "ovs_interfaceid": "3ff4c436-59c0-4ea6-b33d-4a702aa5986c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1754.919280] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 04bea7722ef84959809ff5d583e612c7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1754.929697] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 04bea7722ef84959809ff5d583e612c7 [ 1754.930335] env[62740]: DEBUG oslo_concurrency.lockutils [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Releasing lock "refresh_cache-9a595940-16ba-401a-922f-331cf87093c9" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1754.930616] env[62740]: DEBUG nova.compute.manager [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Instance network_info: |[{"id": "3ff4c436-59c0-4ea6-b33d-4a702aa5986c", "address": "fa:16:3e:e4:30:e0", "network": {"id": "fe48b87d-d6bf-41e3-8587-388615fdb42f", "bridge": "br-int", "label": "tempest-ServersTestJSON-590567929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d3897fd0522431c87c8830678fd59ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": 
"nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ff4c436-59", "ovs_interfaceid": "3ff4c436-59c0-4ea6-b33d-4a702aa5986c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1754.931042] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e4:30:e0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3952eb02-1162-48ed-8227-9c138960d583', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3ff4c436-59c0-4ea6-b33d-4a702aa5986c', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1754.938505] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Creating folder: Project (1d3897fd0522431c87c8830678fd59ae). Parent ref: group-v156037. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1754.939062] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-38b7659e-6cc3-4d7e-b909-926ac63ba372 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.950787] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Created folder: Project (1d3897fd0522431c87c8830678fd59ae) in parent group-v156037. [ 1754.950968] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Creating folder: Instances. Parent ref: group-v156172. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1754.951195] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-23912e68-3840-4a12-9ced-d801c4c09bda {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.959376] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Created folder: Instances in parent group-v156172. [ 1754.959595] env[62740]: DEBUG oslo.service.loopingcall [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
[ 1754.959788] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1754.959980] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-47d97b10-fdea-467a-bab7-8d55e2c9baef {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.977898] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1754.977898] env[62740]: value = "task-640320" [ 1754.977898] env[62740]: _type = "Task" [ 1754.977898] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.984877] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640320, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.489554] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640320, 'name': CreateVM_Task, 'duration_secs': 0.295253} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1755.489755] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1755.490465] env[62740]: DEBUG oslo_concurrency.lockutils [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1755.490640] env[62740]: DEBUG oslo_concurrency.lockutils [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1755.491115] env[62740]: DEBUG oslo_concurrency.lockutils [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1755.491421] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-068cfe33-a2fb-49ef-8ca7-06890640950e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.496056] env[62740]: DEBUG oslo_vmware.api [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Waiting for the task: (returnval){ [ 1755.496056] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52324a0b-e6fb-236f-9a93-4f9797eca1bf" [ 1755.496056] env[62740]: _type = "Task" [ 1755.496056] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1755.505071] env[62740]: DEBUG oslo_vmware.api [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52324a0b-e6fb-236f-9a93-4f9797eca1bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.006523] env[62740]: DEBUG oslo_concurrency.lockutils [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1756.006523] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1756.006855] env[62740]: DEBUG oslo_concurrency.lockutils [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1756.634362] env[62740]: DEBUG nova.compute.manager [req-f6c6a903-e2be-4655-a5b0-8fe4fd7ce04a req-821c3253-c8b7-44a6-bbfd-97c3277e5624 service nova] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Received event network-changed-3ff4c436-59c0-4ea6-b33d-4a702aa5986c {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1756.634641] env[62740]: DEBUG nova.compute.manager [req-f6c6a903-e2be-4655-a5b0-8fe4fd7ce04a req-821c3253-c8b7-44a6-bbfd-97c3277e5624 service nova] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Refreshing instance network info cache due to event network-changed-3ff4c436-59c0-4ea6-b33d-4a702aa5986c. 
{{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1756.634761] env[62740]: DEBUG oslo_concurrency.lockutils [req-f6c6a903-e2be-4655-a5b0-8fe4fd7ce04a req-821c3253-c8b7-44a6-bbfd-97c3277e5624 service nova] Acquiring lock "refresh_cache-9a595940-16ba-401a-922f-331cf87093c9" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1756.634913] env[62740]: DEBUG oslo_concurrency.lockutils [req-f6c6a903-e2be-4655-a5b0-8fe4fd7ce04a req-821c3253-c8b7-44a6-bbfd-97c3277e5624 service nova] Acquired lock "refresh_cache-9a595940-16ba-401a-922f-331cf87093c9" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1756.635239] env[62740]: DEBUG nova.network.neutron [req-f6c6a903-e2be-4655-a5b0-8fe4fd7ce04a req-821c3253-c8b7-44a6-bbfd-97c3277e5624 service nova] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Refreshing network info cache for port 3ff4c436-59c0-4ea6-b33d-4a702aa5986c {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1756.635816] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-f6c6a903-e2be-4655-a5b0-8fe4fd7ce04a req-821c3253-c8b7-44a6-bbfd-97c3277e5624 service nova] Expecting reply to msg 79f9479ec4de4e5b95fa154e1aa95d4e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1756.643029] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79f9479ec4de4e5b95fa154e1aa95d4e [ 1756.969651] env[62740]: DEBUG nova.network.neutron [req-f6c6a903-e2be-4655-a5b0-8fe4fd7ce04a req-821c3253-c8b7-44a6-bbfd-97c3277e5624 service nova] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Updated VIF entry in instance network info cache for port 3ff4c436-59c0-4ea6-b33d-4a702aa5986c. 
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1756.970019] env[62740]: DEBUG nova.network.neutron [req-f6c6a903-e2be-4655-a5b0-8fe4fd7ce04a req-821c3253-c8b7-44a6-bbfd-97c3277e5624 service nova] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Updating instance_info_cache with network_info: [{"id": "3ff4c436-59c0-4ea6-b33d-4a702aa5986c", "address": "fa:16:3e:e4:30:e0", "network": {"id": "fe48b87d-d6bf-41e3-8587-388615fdb42f", "bridge": "br-int", "label": "tempest-ServersTestJSON-590567929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d3897fd0522431c87c8830678fd59ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ff4c436-59", "ovs_interfaceid": "3ff4c436-59c0-4ea6-b33d-4a702aa5986c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1756.970540] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-f6c6a903-e2be-4655-a5b0-8fe4fd7ce04a req-821c3253-c8b7-44a6-bbfd-97c3277e5624 service nova] Expecting reply to msg 42f19ce37eeb4dd7a0701b301021e507 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1756.978924] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 42f19ce37eeb4dd7a0701b301021e507 [ 1756.979553] env[62740]: DEBUG oslo_concurrency.lockutils [req-f6c6a903-e2be-4655-a5b0-8fe4fd7ce04a req-821c3253-c8b7-44a6-bbfd-97c3277e5624 service nova] Releasing lock "refresh_cache-9a595940-16ba-401a-922f-331cf87093c9" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1764.815837] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c15ba887-c356-4406-b909-50dd3bb8c551 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 9db6a2d944884befb45b67df488737d7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1764.827726] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9db6a2d944884befb45b67df488737d7 [ 1764.828210] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c15ba887-c356-4406-b909-50dd3bb8c551 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Acquiring lock "c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1768.328746] env[62740]: DEBUG oslo_concurrency.lockutils [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Acquiring lock "b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1768.328746] env[62740]: DEBUG oslo_concurrency.lockutils [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Lock "b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1784.300686] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c9c8d76d-2ce5-4ba2-8619-6a862799e6ec tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg 9df937f005a24365b6eaf0cbc569171f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1784.312445] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9df937f005a24365b6eaf0cbc569171f [ 1784.313317] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c9c8d76d-2ce5-4ba2-8619-6a862799e6ec tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Acquiring lock "1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1797.903813] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1799.890989] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1799.891339] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Starting heal instance info cache {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 1799.891339] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Rebuilding the list of instances to heal {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 1799.891923] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg cb4801f6539d4b96b3fb3ce6af374066 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1799.912666] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cb4801f6539d4b96b3fb3ce6af374066 [ 1799.914811] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1799.914963] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Skipping network cache update for instance because it is Building. 
{{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1799.915113] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1799.915244] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1799.915370] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1799.915495] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1799.915618] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1799.915739] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1799.915862] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1799.915982] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1799.916117] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Didn't find any instances for network info cache update. 
{{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 1799.916588] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1800.891716] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1800.891716] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1800.891716] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62740) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 1801.661283] env[62740]: WARNING oslo_vmware.rw_handles [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1801.661283] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1801.661283] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1801.661283] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1801.661283] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1801.661283] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 1801.661283] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1801.661283] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1801.661283] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1801.661283] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1801.661283] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1801.661283] env[62740]: ERROR oslo_vmware.rw_handles [ 1801.661897] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/5ab397cb-080a-4698-8a98-4d779569f052/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1801.663607] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Caching image {{(pid=62740) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1801.663861] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Copying Virtual Disk [datastore2] vmware_temp/5ab397cb-080a-4698-8a98-4d779569f052/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore2] vmware_temp/5ab397cb-080a-4698-8a98-4d779569f052/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1801.664165] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c9ae1538-cb12-447b-a7ba-0e02491ce6c5 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.672549] env[62740]: DEBUG oslo_vmware.api [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Waiting for the task: (returnval){ [ 1801.672549] env[62740]: value = "task-640321" [ 1801.672549] env[62740]: _type = "Task" [ 1801.672549] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.680151] env[62740]: DEBUG oslo_vmware.api [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Task: {'id': task-640321, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.182313] env[62740]: DEBUG oslo_vmware.exceptions [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Fault InvalidArgument not matched. 
{{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1802.182672] env[62740]: DEBUG oslo_concurrency.lockutils [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1802.183148] env[62740]: ERROR nova.compute.manager [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1802.183148] env[62740]: Faults: ['InvalidArgument'] [ 1802.183148] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Traceback (most recent call last): [ 1802.183148] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1802.183148] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] yield resources [ 1802.183148] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1802.183148] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] self.driver.spawn(context, instance, image_meta, [ 1802.183148] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1802.183148] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1802.183148] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1802.183148] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] self._fetch_image_if_missing(context, vi) [ 1802.183148] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1802.183148] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] image_cache(vi, tmp_image_ds_loc) [ 1802.183148] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1802.183148] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] vm_util.copy_virtual_disk( [ 1802.183148] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1802.183148] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] session._wait_for_task(vmdk_copy_task) [ 1802.183148] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task 
[ 1802.183148] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] return self.wait_for_task(task_ref) [ 1802.183148] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1802.183148] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] return evt.wait() [ 1802.183148] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1802.183148] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] result = hub.switch() [ 1802.183148] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1802.183148] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] return self.greenlet.switch() [ 1802.183148] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1802.183148] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] self.f(*self.args, **self.kw) [ 1802.183148] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1802.183148] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] raise exceptions.translate_fault(task_info.error) [ 1802.183148] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1802.183148] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Faults: ['InvalidArgument'] [ 1802.183148] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] [ 1802.184147] env[62740]: INFO nova.compute.manager [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Terminating instance [ 1802.184990] env[62740]: DEBUG oslo_concurrency.lockutils [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1802.185227] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1802.185472] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-71c3b4cb-ce98-4957-a8b9-751b9d1d5e96 {{(pid=62740) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.187563] env[62740]: DEBUG nova.compute.manager [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1802.187752] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1802.188469] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42d1c21b-cb46-4dea-a0cd-72cc656229a2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.195410] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1802.195584] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ad7cdf27-67dc-4bbb-a6dd-31741a2a2334 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.197607] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1802.197779] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1802.198730] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e91f12c-af13-4069-9292-105b16945a26 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.203134] env[62740]: DEBUG oslo_vmware.api [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Waiting for the task: (returnval){ [ 1802.203134] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52232bed-0c08-a2de-e461-b8213b7c2c71" [ 1802.203134] env[62740]: _type = "Task" [ 1802.203134] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1802.210141] env[62740]: DEBUG oslo_vmware.api [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52232bed-0c08-a2de-e461-b8213b7c2c71, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.277665] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1802.277912] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1802.278071] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Deleting the datastore file [datastore2] 07efd13e-40d0-4158-b17c-6f5c75474ce3 {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1802.278352] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0f15901a-2596-4e26-a2c0-ae896f551c00 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.284772] env[62740]: DEBUG oslo_vmware.api [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Waiting for the task: (returnval){ [ 1802.284772] env[62740]: value = "task-640323" [ 1802.284772] env[62740]: _type = "Task" [ 1802.284772] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1802.292109] env[62740]: DEBUG oslo_vmware.api [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Task: {'id': task-640323, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
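Every *_Task entry in this log follows the same oslo.vmware pattern: invoke the SOAP method through the session, then block on the returned task until vSphere reports success or error. A hedged sketch of that pattern against the DeleteDatastoreFile_Task above; connection values are placeholders and the datacenter moref is elided:

    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',   # placeholder vCenter credentials
        api_retry_count=10, task_poll_interval=0.5)

    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore2] 07efd13e-40d0-4158-b17c-6f5c75474ce3',
        datacenter=None)  # a real call passes the datacenter managed object ref
    session.wait_for_task(task)  # polls, logging progress like the lines above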
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.713899] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1802.714138] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Creating directory with path [datastore2] vmware_temp/6350c174-8630-4dcd-bbfa-1bcb4a2853ad/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1802.714382] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3d39ed5d-dfcf-4df4-bd88-d563caa0d375 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.726611] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Created directory with path [datastore2] vmware_temp/6350c174-8630-4dcd-bbfa-1bcb4a2853ad/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1802.726831] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Fetch image to [datastore2] vmware_temp/6350c174-8630-4dcd-bbfa-1bcb4a2853ad/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1802.727017] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/6350c174-8630-4dcd-bbfa-1bcb4a2853ad/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1802.727726] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eea93373-66af-463f-af0f-bb68d8926a65 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.735189] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdaa42e5-f1e4-472c-bca9-a11a6f940fb5 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.743659] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e9bca28-bedb-4ba5-b237-bd4a0cb041b1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.773276] env[62740]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8efe700-0ae7-4d2f-8000-6e53916fbd0f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.778428] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-36da0bb9-b470-4467-9ecf-41d6613f1d68 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.792129] env[62740]: DEBUG oslo_vmware.api [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Task: {'id': task-640323, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071363} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1802.792353] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1802.792537] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1802.792712] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1802.792887] env[62740]: INFO nova.compute.manager [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Took 0.61 seconds to destroy the instance on the hypervisor. 
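The task-polling pattern visible above (wait_for_task registering interest in task-640323, _poll_task reporting "progress is 0%", then "completed successfully" with duration_secs) can be summarized with a minimal sketch. The helper below is hypothetical and merely stands in for oslo.vmware's eventlet-driven loop, which reads the task's TaskInfo and either returns the result or raises the translated fault:

    import time

    class TaskFault(Exception):
        """Stand-in for the translated VIM fault raised on task error."""

    def wait_for_task(poll_task_info, interval=0.5):
        # poll_task_info is a hypothetical callable returning a dict like
        # {'state': 'running'|'success'|'error', 'progress': int, ...};
        # the real library reads TaskInfo via the PropertyCollector and
        # drives this loop from an eventlet looping call, not time.sleep.
        while True:
            info = poll_task_info()
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                # Mirrors the behavior seen later in this log: the poller
                # translates the task's fault and raises it to the waiter.
                raise TaskFault(info.get('error', 'unknown fault'))
            print("progress is %d%%" % info.get('progress', 0))
            time.sleep(interval)

    # Toy usage: a task that completes on the third poll.
    _states = iter([{'state': 'running', 'progress': 0},
                    {'state': 'running', 'progress': 50},
                    {'state': 'success', 'result': None}])
    wait_for_task(lambda: next(_states), interval=0.01)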
[ 1802.794929] env[62740]: DEBUG nova.compute.claims [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1802.795115] env[62740]: DEBUG oslo_concurrency.lockutils [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1802.795363] env[62740]: DEBUG oslo_concurrency.lockutils [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1802.797371] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 39dd4149a5924e19890d1c3d469926cf in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1802.799519] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1802.836103] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 39dd4149a5924e19890d1c3d469926cf [ 1802.853653] env[62740]: DEBUG oslo_vmware.rw_handles [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6350c174-8630-4dcd-bbfa-1bcb4a2853ad/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1802.855543] env[62740]: DEBUG nova.scheduler.client.report [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Refreshing inventories for resource provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1802.911652] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1802.912744] env[62740]: DEBUG nova.scheduler.client.report [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Updating ProviderTree inventory for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1802.912947] env[62740]: DEBUG nova.compute.provider_tree [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Updating inventory in ProviderTree for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1802.914877] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager.update_available_resource {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1802.915239] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 5a5349976039468aa53bc8707e01837f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1802.918392] env[62740]: DEBUG oslo_vmware.rw_handles [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Completed reading data from the image iterator. 
{{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1802.918562] env[62740]: DEBUG oslo_vmware.rw_handles [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6350c174-8630-4dcd-bbfa-1bcb4a2853ad/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1802.928164] env[62740]: DEBUG nova.scheduler.client.report [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Refreshing aggregate associations for resource provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0, aggregates: None {{(pid=62740) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1802.930589] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5a5349976039468aa53bc8707e01837f [ 1802.931933] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1802.950189] env[62740]: DEBUG nova.scheduler.client.report [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Refreshing trait associations for resource provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62740) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1803.113037] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-660364fd-8339-4447-9a03-91b8bf0a0db9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.120086] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ce282fa-edb7-44c2-bd5c-014fba77849f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.150058] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b963c1c-67c9-4f92-adc2-ff168a74edd7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.156546] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5e027e6-81a6-476b-9519-72c9d635bc29 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.169122] env[62740]: DEBUG nova.compute.provider_tree [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1803.169606] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None 
req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 249da596a7ed462ebc41bd5fc93af20e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1803.177059] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 249da596a7ed462ebc41bd5fc93af20e [ 1803.177946] env[62740]: DEBUG nova.scheduler.client.report [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1803.181026] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 9877ca943c3c49a19aca434db8019dad in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1803.193032] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9877ca943c3c49a19aca434db8019dad [ 1803.194079] env[62740]: DEBUG oslo_concurrency.lockutils [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.399s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1803.194614] env[62740]: ERROR nova.compute.manager [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1803.194614] env[62740]: Faults: ['InvalidArgument'] [ 1803.194614] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Traceback (most recent call last): [ 1803.194614] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1803.194614] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] self.driver.spawn(context, instance, image_meta, [ 1803.194614] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1803.194614] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1803.194614] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1803.194614] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] self._fetch_image_if_missing(context, vi) [ 1803.194614] env[62740]: ERROR 
nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1803.194614] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] image_cache(vi, tmp_image_ds_loc) [ 1803.194614] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1803.194614] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] vm_util.copy_virtual_disk( [ 1803.194614] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1803.194614] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] session._wait_for_task(vmdk_copy_task) [ 1803.194614] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1803.194614] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] return self.wait_for_task(task_ref) [ 1803.194614] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1803.194614] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] return evt.wait() [ 1803.194614] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1803.194614] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] result = hub.switch() [ 1803.194614] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1803.194614] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] return self.greenlet.switch() [ 1803.194614] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1803.194614] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] self.f(*self.args, **self.kw) [ 1803.194614] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1803.194614] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] raise exceptions.translate_fault(task_info.error) [ 1803.194614] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1803.194614] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Faults: ['InvalidArgument'] [ 1803.194614] env[62740]: ERROR nova.compute.manager [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] [ 1803.195521] env[62740]: DEBUG nova.compute.utils [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 
tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1803.196401] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.264s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1803.196585] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1803.196764] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62740) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1803.197437] env[62740]: DEBUG nova.compute.manager [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Build of instance 07efd13e-40d0-4158-b17c-6f5c75474ce3 was re-scheduled: A specified parameter was not correct: fileType [ 1803.197437] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1803.197812] env[62740]: DEBUG nova.compute.manager [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1803.197984] env[62740]: DEBUG nova.compute.manager [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1803.198181] env[62740]: DEBUG nova.compute.manager [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1803.198346] env[62740]: DEBUG nova.network.neutron [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1803.200607] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75cb4353-3ec2-4856-b166-f87b42e531bd {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.208518] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c327bee0-1d1c-41d4-8c71-51b09f885373 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.222307] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0f3fc79-6ed8-4479-a64c-4c603992c376 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.228251] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42848b9b-91b5-4bbb-8f98-8c30062fa437 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.257409] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181690MB free_disk=90GB free_vcpus=48 pci_devices=None {{(pid=62740) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1803.257622] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1803.257750] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1803.258622] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 0aebcb9f22f84b75a8358fc2e69107e1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1803.289513] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0aebcb9f22f84b75a8358fc2e69107e1 [ 1803.293356] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 5bf615345d784edfaa7fb5fba000675c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1803.302197] env[62740]: 
INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5bf615345d784edfaa7fb5fba000675c [ 1803.319342] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 3e651fe8a53b43b9a900ebc8550894a8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1803.329073] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e651fe8a53b43b9a900ebc8550894a8 [ 1803.329786] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 07efd13e-40d0-4158-b17c-6f5c75474ce3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1803.329933] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 388d71f2-b229-4666-a53d-d5b07e498eed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1803.330074] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance c0daf074-eecb-4899-938f-477031efc6d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1803.330200] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 61fea037-aac3-47ef-aa6a-5dfa657d840d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1803.330320] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 86c57375-8328-4344-b228-2f1ce6efc71e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1803.330439] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance a41506d2-33b2-40b8-badb-41312c7abbd2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1803.330556] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 3aa2858e-d422-408a-a83a-98382f971add actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1803.330673] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1803.330788] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1803.330903] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 9a595940-16ba-401a-922f-331cf87093c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1803.331397] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 559c217f29f847b788eeb81082b91e9d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1803.340683] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 559c217f29f847b788eeb81082b91e9d [ 1803.341322] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance d2fb90b7-1618-4f07-8854-81566887a7cd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1803.341763] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg ce8935c4df704361bce362340bb2153f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1803.350641] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ce8935c4df704361bce362340bb2153f [ 1803.351245] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 08197ee6-55de-40f8-8704-641c0614cad6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1803.351673] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg d144f32d69a749e1bad6b0bd64533900 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1803.360369] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d144f32d69a749e1bad6b0bd64533900 [ 1803.361251] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 19f27c16-45b3-47d8-acf0-18255844431f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1803.361834] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg fd3269ea70bc49edbd60f64d070c08c7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1803.370256] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fd3269ea70bc49edbd60f64d070c08c7 [ 1803.370843] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 4ee71d81-7d8b-42f8-a27c-b4645169fa3e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1803.371287] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg cc018e998f8844cba8fafe0b34271922 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1803.380029] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cc018e998f8844cba8fafe0b34271922 [ 1803.380606] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1803.380822] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1803.380980] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1803.520291] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 0fa4f5f538da4311a79af8ae56c958c6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1803.534738] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0fa4f5f538da4311a79af8ae56c958c6 [ 1803.534738] env[62740]: DEBUG nova.network.neutron [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1803.534738] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg e46310ae97054499b60f2311ba5b9fec in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1803.550017] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e46310ae97054499b60f2311ba5b9fec [ 1803.550017] env[62740]: INFO nova.compute.manager [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Took 0.35 seconds to deallocate network for instance. 
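The "Final resource view" figures above follow directly from the per-instance allocations the tracker just listed, assuming used_ram counts the 512 MB host reservation plus each instance's MEMORY_MB allocation (a simplification of the real ResourceTracker bookkeeping). A minimal sketch of that arithmetic:

    # Nine instances are either actively managed or claimed above, each
    # holding {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1} in placement.
    allocations = [{'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}] * 9
    reserved_mb = 512  # the 'reserved' value in the MEMORY_MB inventory

    used_ram = reserved_mb + sum(a['MEMORY_MB'] for a in allocations)
    used_disk = sum(a['DISK_GB'] for a in allocations)
    used_vcpus = sum(a['VCPU'] for a in allocations)

    assert used_ram == 1664    # matches used_ram=1664MB in the log
    assert used_disk == 9      # matches used_disk=9GB
    assert used_vcpus == 9     # matches used_vcpus=9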
[ 1803.550017] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg f670e8d63179452f88602479b153f178 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1803.602023] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f670e8d63179452f88602479b153f178 [ 1803.602023] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg c4382f6bdf7e4dd9814f73c8bb1374b9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1803.605518] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-466f61a5-5a09-4148-8af4-cfdb1c4ac007 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.614973] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1c23209-99db-46b9-876b-b1cd61bbf456 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.651483] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c4382f6bdf7e4dd9814f73c8bb1374b9 [ 1803.655235] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4f7b726-7f54-440e-b2fb-62c9ae972fee {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.666018] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a40161a8-fde6-4821-a984-2f152123865a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.679409] env[62740]: DEBUG nova.compute.provider_tree [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1803.679905] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg cd9da59d9624432a9d86be7700c21771 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1803.687031] env[62740]: INFO nova.scheduler.client.report [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Deleted allocations for instance 07efd13e-40d0-4158-b17c-6f5c75474ce3 [ 1803.690979] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd9da59d9624432a9d86be7700c21771 [ 1803.691809] env[62740]: DEBUG nova.scheduler.client.report [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 
1803.694103] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 8e759b28a27d4bdebf85a1fbb430c9d0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1803.698018] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 8f4570385e2c47fcae02f30057ac26a6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1803.707400] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e759b28a27d4bdebf85a1fbb430c9d0 [ 1803.708125] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62740) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1803.708315] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.451s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1803.760031] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f4570385e2c47fcae02f30057ac26a6 [ 1803.760669] env[62740]: DEBUG oslo_concurrency.lockutils [None req-46f3e8aa-7e4a-4afd-83d8-134d3ca1b0bb tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Lock "07efd13e-40d0-4158-b17c-6f5c75474ce3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 579.509s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1803.761336] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg bd37a6836d904f84ac0e2288f8dabeb6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1803.764961] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6797b534-1fe7-430f-8c33-3945daa0959c tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Lock "07efd13e-40d0-4158-b17c-6f5c75474ce3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 382.452s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1803.764961] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6797b534-1fe7-430f-8c33-3945daa0959c tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Acquiring lock "07efd13e-40d0-4158-b17c-6f5c75474ce3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1803.764961] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6797b534-1fe7-430f-8c33-3945daa0959c tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Lock "07efd13e-40d0-4158-b17c-6f5c75474ce3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1803.764961] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6797b534-1fe7-430f-8c33-3945daa0959c tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Lock "07efd13e-40d0-4158-b17c-6f5c75474ce3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1803.765377] env[62740]: INFO nova.compute.manager [None req-6797b534-1fe7-430f-8c33-3945daa0959c tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Terminating instance [ 1803.767286] env[62740]: DEBUG nova.compute.manager [None req-6797b534-1fe7-430f-8c33-3945daa0959c tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1803.767439] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6797b534-1fe7-430f-8c33-3945daa0959c tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1803.767797] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c1f19c52-6a1d-42ef-8b1b-8118e3d9c9e7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.782016] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abec5794-669c-462c-9300-05aa1bff6634 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.789688] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bd37a6836d904f84ac0e2288f8dabeb6 [ 1803.790278] env[62740]: DEBUG nova.compute.manager [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1803.792070] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 08cfad1953d440c9babd43297298fd98 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1803.812885] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-6797b534-1fe7-430f-8c33-3945daa0959c tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 07efd13e-40d0-4158-b17c-6f5c75474ce3 could not be found. 
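The WARNING just above illustrates the idempotent destroy pattern: a VM already gone from the backend raises InstanceNotFound, which is logged and swallowed so that terminate still reports "Instance destroyed" and network cleanup proceeds. A minimal sketch of that pattern, using a hypothetical backend object rather than the driver's actual API:

    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""

    class MissingBackend:
        # Toy backend whose lookup always fails, mirroring the WARNING.
        def lookup(self, uuid):
            raise InstanceNotFound('Instance %s could not be found.' % uuid)

    def destroy(instance_uuid, backend, log):
        try:
            vm_ref = backend.lookup(instance_uuid)
            backend.unregister(vm_ref)        # not reached when VM is gone
            backend.delete_files(instance_uuid)
        except InstanceNotFound as exc:
            # Swallow the error: destroy must succeed even if the backend
            # object vanished earlier, so terminate stays idempotent.
            log.append('Instance does not exist on backend: %s' % exc)
        log.append('Instance destroyed')

    log = []
    destroy('07efd13e-40d0-4158-b17c-6f5c75474ce3', MissingBackend(), log)
    assert log[-1] == 'Instance destroyed'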
[ 1803.813109] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6797b534-1fe7-430f-8c33-3945daa0959c tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1803.813292] env[62740]: INFO nova.compute.manager [None req-6797b534-1fe7-430f-8c33-3945daa0959c tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1803.813547] env[62740]: DEBUG oslo.service.loopingcall [None req-6797b534-1fe7-430f-8c33-3945daa0959c tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1803.813785] env[62740]: DEBUG nova.compute.manager [-] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1803.813894] env[62740]: DEBUG nova.network.neutron [-] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1803.835191] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e5fe5294b80c48a5b1505680f2d5140a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1803.842411] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e5fe5294b80c48a5b1505680f2d5140a [ 1803.842835] env[62740]: DEBUG nova.network.neutron [-] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1803.843263] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 6bbcfb1641a34038a32d08436b821534 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1803.846807] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 08cfad1953d440c9babd43297298fd98 [ 1803.853892] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6bbcfb1641a34038a32d08436b821534 [ 1803.854618] env[62740]: INFO nova.compute.manager [-] [instance: 07efd13e-40d0-4158-b17c-6f5c75474ce3] Took 0.04 seconds to deallocate network for instance. 
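Instance claims, as in the "Claim successful" entry that follows, are serialized under the per-host "compute_resources" lock so that two concurrent builds cannot both consume the same headroom. A minimal sketch, assuming a plain dict of free resources and a threading.Lock in place of the oslo_concurrency.lockutils semaphore the log shows being acquired and released:

    import threading

    # Stands in for the "compute_resources" lock taken via lockutils above.
    COMPUTE_RESOURCES = threading.Lock()

    def instance_claim(free, requested):
        # All resource checks and the deduction happen while the lock is
        # held; otherwise a racing claim could pass the same check.
        with COMPUTE_RESOURCES:
            if all(free.get(k, 0) >= v for k, v in requested.items()):
                for k, v in requested.items():
                    free[k] -= v
                return True
            return False

    # Figures echo the hypervisor resource view reported earlier in the log.
    free = {'MEMORY_MB': 181690, 'VCPU': 48, 'DISK_GB': 90}
    assert instance_claim(free, {'MEMORY_MB': 128, 'VCPU': 1, 'DISK_GB': 1})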
[ 1803.859279] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6797b534-1fe7-430f-8c33-3945daa0959c tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 35613c7a8c234762b37fe4e52b4335d4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1803.867660] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1803.868158] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1803.869999] env[62740]: INFO nova.compute.claims [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1803.871854] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg fe6937c8bc0d4477a1abbd701c524e6c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1803.889174] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 35613c7a8c234762b37fe4e52b4335d4 [ 1803.902634] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6797b534-1fe7-430f-8c33-3945daa0959c tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 7e994ca409834571ba49c786e035f63d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1803.905784] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fe6937c8bc0d4477a1abbd701c524e6c [ 1803.907424] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg caee072b1d9e47bd8872d96f98e42f1f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1803.914455] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg caee072b1d9e47bd8872d96f98e42f1f [ 1803.942513] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7e994ca409834571ba49c786e035f63d [ 1803.947677] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6797b534-1fe7-430f-8c33-3945daa0959c tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Lock "07efd13e-40d0-4158-b17c-6f5c75474ce3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.186s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1803.948027] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6797b534-1fe7-430f-8c33-3945daa0959c tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 37ec7d9dbbec4e708f6eb09f0342c163 in queue 
reply_30cb6e3d754a4ebf9cedab7950709402 [ 1803.957761] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 37ec7d9dbbec4e708f6eb09f0342c163 [ 1804.077966] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a20c5dc-84a2-4bbf-b9b6-499b88bcf459 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.086792] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e64a93b-a0bd-4872-aa40-8c475824b93a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.117338] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c28cd3e1-e1dc-476e-aedc-467c7ae9c2d0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.124811] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6550625-461f-4f7b-a8c8-1590182a50ef {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.137866] env[62740]: DEBUG nova.compute.provider_tree [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1804.138356] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 1dba74049c9448fe888cd26d0c050bc7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1804.145521] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1dba74049c9448fe888cd26d0c050bc7 [ 1804.146388] env[62740]: DEBUG nova.scheduler.client.report [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1804.148587] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 9300b7d7e534475bb4066078fdf22c71 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1804.159080] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9300b7d7e534475bb4066078fdf22c71 [ 1804.159761] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.292s {{(pid=62740) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1804.160241] env[62740]: DEBUG nova.compute.manager [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Start building networks asynchronously for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1804.161837] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 81dc92e3987e4cb9a0f4d41429fcc36c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1804.192196] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 81dc92e3987e4cb9a0f4d41429fcc36c [ 1804.192519] env[62740]: DEBUG nova.compute.utils [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1804.193170] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg d44c66bb7f34421592c437dd4eb238e9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1804.193947] env[62740]: DEBUG nova.compute.manager [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1804.194178] env[62740]: DEBUG nova.network.neutron [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1804.202039] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d44c66bb7f34421592c437dd4eb238e9 [ 1804.202612] env[62740]: DEBUG nova.compute.manager [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Start building block device mappings for instance. 
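The resource claim above reports provider inventory as plain per-resource-class dicts (VCPU, MEMORY_MB, DISK_GB). As an aid to reading those records, here is a minimal sketch (helper name invented; the VCPU literal is copied from the report above, and the capacity formula follows the usual placement semantics of scaling reserved-adjusted totals by the allocation ratio):

    def effective_capacity(inv):
        # Reserved units come off the top, then the remainder is scaled
        # by the oversubscription ratio; max_unit still caps any single
        # allocation (16 vCPUs per instance in this inventory).
        return int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])

    vcpu = {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
            'step_size': 1, 'allocation_ratio': 4.0}
    assert effective_capacity(vcpu) == 192  # 48 physical vCPUs oversubscribed 4x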
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1804.205059] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg da9bd2457cb74e588ff78cf6fb357da2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1804.232611] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg da9bd2457cb74e588ff78cf6fb357da2 [ 1804.235697] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg a31457ee97b54dc29cf8a30f779d673b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1804.264631] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a31457ee97b54dc29cf8a30f779d673b [ 1804.265842] env[62740]: DEBUG nova.compute.manager [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Start spawning the instance on the hypervisor. {{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1804.277719] env[62740]: DEBUG nova.policy [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '070a90ba779f4bc59053f8bffc95de94', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1f735ac36a0d46269560f1209706fb69', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 1804.291683] env[62740]: DEBUG nova.virt.hardware [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1804.291920] env[62740]: DEBUG nova.virt.hardware [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1804.292096] env[62740]: DEBUG nova.virt.hardware [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 
tempest-ImagesTestJSON-592126557-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1804.292289] env[62740]: DEBUG nova.virt.hardware [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1804.292437] env[62740]: DEBUG nova.virt.hardware [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1804.292586] env[62740]: DEBUG nova.virt.hardware [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1804.292787] env[62740]: DEBUG nova.virt.hardware [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1804.292949] env[62740]: DEBUG nova.virt.hardware [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1804.293155] env[62740]: DEBUG nova.virt.hardware [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1804.293325] env[62740]: DEBUG nova.virt.hardware [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1804.293498] env[62740]: DEBUG nova.virt.hardware [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1804.294341] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69f766e8-40a3-494b-a818-8d8a663cfeb7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.301963] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-828665bb-ab00-4d70-8b5d-4d75c175cf86 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.609328] env[62740]: DEBUG nova.network.neutron [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Successfully 
created port: f9c05c64-ddc5-4895-a56d-3a5533ca4712 {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1805.318691] env[62740]: DEBUG nova.network.neutron [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Successfully updated port: f9c05c64-ddc5-4895-a56d-3a5533ca4712 {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1805.319579] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 9823eb41a55c4c27872c62cba3adf4a6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1805.330958] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9823eb41a55c4c27872c62cba3adf4a6 [ 1805.332193] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Acquiring lock "refresh_cache-d2fb90b7-1618-4f07-8854-81566887a7cd" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1805.332767] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Acquired lock "refresh_cache-d2fb90b7-1618-4f07-8854-81566887a7cd" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1805.333053] env[62740]: DEBUG nova.network.neutron [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1805.333643] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 610d2c093ed942d7a8d2b67ef0d8baef in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1805.341226] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 610d2c093ed942d7a8d2b67ef0d8baef [ 1805.379907] env[62740]: DEBUG nova.network.neutron [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1805.560468] env[62740]: DEBUG nova.network.neutron [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Updating instance_info_cache with network_info: [{"id": "f9c05c64-ddc5-4895-a56d-3a5533ca4712", "address": "fa:16:3e:11:03:20", "network": {"id": "f10f5770-f866-413b-86ce-20c3a1473482", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1756365504-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f735ac36a0d46269560f1209706fb69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9c05c64-dd", "ovs_interfaceid": "f9c05c64-ddc5-4895-a56d-3a5533ca4712", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1805.561041] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg d423868c162a462ea327dbad85240afc in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1805.574840] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d423868c162a462ea327dbad85240afc [ 1805.576832] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Releasing lock "refresh_cache-d2fb90b7-1618-4f07-8854-81566887a7cd" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1805.576832] env[62740]: DEBUG nova.compute.manager [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Instance network_info: |[{"id": "f9c05c64-ddc5-4895-a56d-3a5533ca4712", "address": "fa:16:3e:11:03:20", "network": {"id": "f10f5770-f866-413b-86ce-20c3a1473482", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1756365504-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f735ac36a0d46269560f1209706fb69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", 
"segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9c05c64-dd", "ovs_interfaceid": "f9c05c64-ddc5-4895-a56d-3a5533ca4712", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1805.576832] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:11:03:20', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f0ef5aba-bd9a-42ff-a1a0-5e763986d70a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f9c05c64-ddc5-4895-a56d-3a5533ca4712', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1805.585064] env[62740]: DEBUG oslo.service.loopingcall [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1805.585064] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1805.585064] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e0b75cc9-0ff8-46c4-93f5-5c02bf5e5fbb {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.607566] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1805.607566] env[62740]: value = "task-640324" [ 1805.607566] env[62740]: _type = "Task" [ 1805.607566] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1805.617828] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640324, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.687302] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1805.890444] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1805.919028] env[62740]: DEBUG nova.compute.manager [req-59539d64-6ecb-40b1-8fba-e5f373f4b588 req-89aac7c5-da08-4d61-b4c7-8b287d4344bc service nova] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Received event network-vif-plugged-f9c05c64-ddc5-4895-a56d-3a5533ca4712 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1805.919268] env[62740]: DEBUG oslo_concurrency.lockutils [req-59539d64-6ecb-40b1-8fba-e5f373f4b588 req-89aac7c5-da08-4d61-b4c7-8b287d4344bc service nova] Acquiring lock "d2fb90b7-1618-4f07-8854-81566887a7cd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1805.919557] env[62740]: DEBUG oslo_concurrency.lockutils [req-59539d64-6ecb-40b1-8fba-e5f373f4b588 req-89aac7c5-da08-4d61-b4c7-8b287d4344bc service nova] Lock "d2fb90b7-1618-4f07-8854-81566887a7cd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1805.919683] env[62740]: DEBUG oslo_concurrency.lockutils [req-59539d64-6ecb-40b1-8fba-e5f373f4b588 req-89aac7c5-da08-4d61-b4c7-8b287d4344bc service nova] Lock "d2fb90b7-1618-4f07-8854-81566887a7cd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1805.919855] env[62740]: DEBUG nova.compute.manager [req-59539d64-6ecb-40b1-8fba-e5f373f4b588 req-89aac7c5-da08-4d61-b4c7-8b287d4344bc service nova] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] No waiting events found dispatching network-vif-plugged-f9c05c64-ddc5-4895-a56d-3a5533ca4712 {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1805.920156] env[62740]: WARNING nova.compute.manager [req-59539d64-6ecb-40b1-8fba-e5f373f4b588 req-89aac7c5-da08-4d61-b4c7-8b287d4344bc service nova] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Received unexpected event network-vif-plugged-f9c05c64-ddc5-4895-a56d-3a5533ca4712 for instance with vm_state building and task_state spawning. 
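The "No waiting events found" / "Received unexpected event" pair above shows the event rendezvous from the spawn side: Neutron's network-vif-plugged notification arrived before anything had registered to wait for it, so it was dropped with a warning. A simplified model of that dispatch (threading.Event stand-in; names invented, not the Nova implementation):

    import threading

    _waiters = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare_for_event(instance_uuid, event_name):
        # Called by the spawning thread *before* the VIF is plugged.
        ev = threading.Event()
        _waiters[(instance_uuid, event_name)] = ev
        return ev

    def external_instance_event(instance_uuid, event_name):
        # Called when Neutron reports the event through the API.
        ev = _waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            print('Received unexpected event %s' % event_name)
        else:
            ev.set()

In the run above no waiter had been registered for d2fb90b7-1618-4f07-8854-81566887a7cd yet, which is exactly the pop-with-no-waiter branch.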
[ 1805.920409] env[62740]: DEBUG nova.compute.manager [req-59539d64-6ecb-40b1-8fba-e5f373f4b588 req-89aac7c5-da08-4d61-b4c7-8b287d4344bc service nova] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Received event network-changed-f9c05c64-ddc5-4895-a56d-3a5533ca4712 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1805.920649] env[62740]: DEBUG nova.compute.manager [req-59539d64-6ecb-40b1-8fba-e5f373f4b588 req-89aac7c5-da08-4d61-b4c7-8b287d4344bc service nova] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Refreshing instance network info cache due to event network-changed-f9c05c64-ddc5-4895-a56d-3a5533ca4712. {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1805.920915] env[62740]: DEBUG oslo_concurrency.lockutils [req-59539d64-6ecb-40b1-8fba-e5f373f4b588 req-89aac7c5-da08-4d61-b4c7-8b287d4344bc service nova] Acquiring lock "refresh_cache-d2fb90b7-1618-4f07-8854-81566887a7cd" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1805.921132] env[62740]: DEBUG oslo_concurrency.lockutils [req-59539d64-6ecb-40b1-8fba-e5f373f4b588 req-89aac7c5-da08-4d61-b4c7-8b287d4344bc service nova] Acquired lock "refresh_cache-d2fb90b7-1618-4f07-8854-81566887a7cd" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1805.921348] env[62740]: DEBUG nova.network.neutron [req-59539d64-6ecb-40b1-8fba-e5f373f4b588 req-89aac7c5-da08-4d61-b4c7-8b287d4344bc service nova] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Refreshing network info cache for port f9c05c64-ddc5-4895-a56d-3a5533ca4712 {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1805.921915] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-59539d64-6ecb-40b1-8fba-e5f373f4b588 req-89aac7c5-da08-4d61-b4c7-8b287d4344bc service nova] Expecting reply to msg 51f7417b1036493dba4ac6b3ba29a0b6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1805.930219] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 51f7417b1036493dba4ac6b3ba29a0b6 [ 1806.117934] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640324, 'name': CreateVM_Task, 'duration_secs': 0.330812} completed successfully. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1806.118221] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1806.138846] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1806.139062] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1806.139415] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1806.139683] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef8367b5-d804-4abe-aa29-4c954a5176d1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.144822] env[62740]: DEBUG oslo_vmware.api [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Waiting for the task: (returnval){ [ 1806.144822] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]524b739b-77aa-8102-7949-8bf8520fd006" [ 1806.144822] env[62740]: _type = "Task" [ 1806.144822] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1806.152895] env[62740]: DEBUG oslo_vmware.api [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]524b739b-77aa-8102-7949-8bf8520fd006, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.224947] env[62740]: DEBUG nova.network.neutron [req-59539d64-6ecb-40b1-8fba-e5f373f4b588 req-89aac7c5-da08-4d61-b4c7-8b287d4344bc service nova] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Updated VIF entry in instance network info cache for port f9c05c64-ddc5-4895-a56d-3a5533ca4712. 
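The image-cache section above serializes on a lock named after the datastore path, so concurrent spawns of the same image only fetch it once. The basic oslo.concurrency pattern looks like this (a sketch, not the vmwareapi driver itself; the lock name is taken from the log):

    from oslo_concurrency import lockutils

    image_lock = ('[datastore2] devstack-image-cache_base/'
                  '174f7655-3fb8-458a-8e9c-108936afe738')

    with lockutils.lock(image_lock):
        # first holder downloads and caches the VMDK; later holders
        # find it already present and reuse it
        pass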
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1806.225336] env[62740]: DEBUG nova.network.neutron [req-59539d64-6ecb-40b1-8fba-e5f373f4b588 req-89aac7c5-da08-4d61-b4c7-8b287d4344bc service nova] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Updating instance_info_cache with network_info: [{"id": "f9c05c64-ddc5-4895-a56d-3a5533ca4712", "address": "fa:16:3e:11:03:20", "network": {"id": "f10f5770-f866-413b-86ce-20c3a1473482", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1756365504-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f735ac36a0d46269560f1209706fb69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9c05c64-dd", "ovs_interfaceid": "f9c05c64-ddc5-4895-a56d-3a5533ca4712", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1806.225860] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-59539d64-6ecb-40b1-8fba-e5f373f4b588 req-89aac7c5-da08-4d61-b4c7-8b287d4344bc service nova] Expecting reply to msg 9a69b21d8db74944918b55d0259d1793 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1806.234414] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9a69b21d8db74944918b55d0259d1793 [ 1806.235014] env[62740]: DEBUG oslo_concurrency.lockutils [req-59539d64-6ecb-40b1-8fba-e5f373f4b588 req-89aac7c5-da08-4d61-b4c7-8b287d4344bc service nova] Releasing lock "refresh_cache-d2fb90b7-1618-4f07-8854-81566887a7cd" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1806.655725] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1806.656072] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1806.656249] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1809.202037] 
env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._sync_power_states {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1809.202344] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 61d3bf640fdc4827b2d1c14eb9c5a17f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1809.224933] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 61d3bf640fdc4827b2d1c14eb9c5a17f [ 1809.227247] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Getting list of instances from cluster (obj){ [ 1809.227247] env[62740]: value = "domain-c8" [ 1809.227247] env[62740]: _type = "ClusterComputeResource" [ 1809.227247] env[62740]: } {{(pid=62740) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1809.228493] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a38eff2a-f234-41ba-9ea3-0ed20d76bcec {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.244922] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Got total of 10 instances {{(pid=62740) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1809.245174] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Triggering sync for uuid 388d71f2-b229-4666-a53d-d5b07e498eed {{(pid=62740) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 1809.245398] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Triggering sync for uuid c0daf074-eecb-4899-938f-477031efc6d1 {{(pid=62740) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 1809.245577] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Triggering sync for uuid 61fea037-aac3-47ef-aa6a-5dfa657d840d {{(pid=62740) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 1809.245744] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Triggering sync for uuid 86c57375-8328-4344-b228-2f1ce6efc71e {{(pid=62740) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 1809.245904] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Triggering sync for uuid a41506d2-33b2-40b8-badb-41312c7abbd2 {{(pid=62740) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 1809.246078] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Triggering sync for uuid 3aa2858e-d422-408a-a83a-98382f971add {{(pid=62740) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 1809.246236] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Triggering sync for uuid c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10 {{(pid=62740) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 1809.246388] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Triggering sync for uuid 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67 {{(pid=62740) _sync_power_states 
/opt/stack/nova/nova/compute/manager.py:10327}} [ 1809.246553] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Triggering sync for uuid 9a595940-16ba-401a-922f-331cf87093c9 {{(pid=62740) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 1809.246705] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Triggering sync for uuid d2fb90b7-1618-4f07-8854-81566887a7cd {{(pid=62740) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 1809.247050] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "388d71f2-b229-4666-a53d-d5b07e498eed" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1809.247287] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "c0daf074-eecb-4899-938f-477031efc6d1" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1809.247502] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "61fea037-aac3-47ef-aa6a-5dfa657d840d" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1809.247897] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "86c57375-8328-4344-b228-2f1ce6efc71e" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1809.248173] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "a41506d2-33b2-40b8-badb-41312c7abbd2" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1809.248399] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "3aa2858e-d422-408a-a83a-98382f971add" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1809.248692] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1809.248919] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62740) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1809.249146] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "9a595940-16ba-401a-922f-331cf87093c9" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1809.249345] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "d2fb90b7-1618-4f07-8854-81566887a7cd" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1813.165786] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-304ede8b-0aab-46a0-86f4-3703dbe5962c tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg d9a4d6a78bf74f6698b43623a5becb39 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1813.176822] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d9a4d6a78bf74f6698b43623a5becb39 [ 1813.177268] env[62740]: DEBUG oslo_concurrency.lockutils [None req-304ede8b-0aab-46a0-86f4-3703dbe5962c tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Acquiring lock "9a595940-16ba-401a-922f-331cf87093c9" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1814.117208] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-84948cae-18f3-4f57-bc24-d27515fac153 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 0570480b460c462983f291a49f59b510 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1814.126101] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0570480b460c462983f291a49f59b510 [ 1814.126548] env[62740]: DEBUG oslo_concurrency.lockutils [None req-84948cae-18f3-4f57-bc24-d27515fac153 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Acquiring lock "d2fb90b7-1618-4f07-8854-81566887a7cd" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1823.567909] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquiring lock "1b975b29-fbaa-4385-9bf9-33496b4ed129" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1823.567909] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "1b975b29-fbaa-4385-9bf9-33496b4ed129" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1850.191826] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 093a8aa71ee947a0811d2288eb47539b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1850.203858] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 093a8aa71ee947a0811d2288eb47539b [ 1851.898505] env[62740]: WARNING oslo_vmware.rw_handles [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1851.898505] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1851.898505] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1851.898505] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1851.898505] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1851.898505] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 1851.898505] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1851.898505] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1851.898505] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1851.898505] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1851.898505] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1851.898505] env[62740]: ERROR oslo_vmware.rw_handles [ 1851.899039] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/6350c174-8630-4dcd-bbfa-1bcb4a2853ad/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1851.901305] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1851.901719] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Copying Virtual Disk [datastore2] vmware_temp/6350c174-8630-4dcd-bbfa-1bcb4a2853ad/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore2] vmware_temp/6350c174-8630-4dcd-bbfa-1bcb4a2853ad/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1851.901899] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-29fe1466-7b72-481a-ae48-4600a0279cdc {{(pid=62740) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.910302] env[62740]: DEBUG oslo_vmware.api [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Waiting for the task: (returnval){ [ 1851.910302] env[62740]: value = "task-640325" [ 1851.910302] env[62740]: _type = "Task" [ 1851.910302] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1851.917772] env[62740]: DEBUG oslo_vmware.api [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Task: {'id': task-640325, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.420399] env[62740]: DEBUG oslo_vmware.exceptions [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Fault InvalidArgument not matched. {{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1852.420611] env[62740]: DEBUG oslo_concurrency.lockutils [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1852.421224] env[62740]: ERROR nova.compute.manager [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1852.421224] env[62740]: Faults: ['InvalidArgument'] [ 1852.421224] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Traceback (most recent call last): [ 1852.421224] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1852.421224] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] yield resources [ 1852.421224] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1852.421224] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] self.driver.spawn(context, instance, image_meta, [ 1852.421224] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1852.421224] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1852.421224] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1852.421224] env[62740]: ERROR nova.compute.manager 
[instance: 388d71f2-b229-4666-a53d-d5b07e498eed] self._fetch_image_if_missing(context, vi) [ 1852.421224] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1852.421224] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] image_cache(vi, tmp_image_ds_loc) [ 1852.421224] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1852.421224] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] vm_util.copy_virtual_disk( [ 1852.421224] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1852.421224] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] session._wait_for_task(vmdk_copy_task) [ 1852.421224] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1852.421224] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] return self.wait_for_task(task_ref) [ 1852.421224] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1852.421224] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] return evt.wait() [ 1852.421224] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1852.421224] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] result = hub.switch() [ 1852.421224] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1852.421224] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] return self.greenlet.switch() [ 1852.421224] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1852.421224] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] self.f(*self.args, **self.kw) [ 1852.421224] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1852.421224] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] raise exceptions.translate_fault(task_info.error) [ 1852.421224] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1852.421224] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Faults: ['InvalidArgument'] [ 1852.421224] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] [ 1852.421914] env[62740]: INFO 
nova.compute.manager [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Terminating instance [ 1852.423038] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1852.423252] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1852.423663] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5b893cc0-cec7-4bb5-b68a-72fdd23e25b1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.425598] env[62740]: DEBUG nova.compute.manager [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1852.425928] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1852.426504] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8e00e72-457e-49e3-aabc-fdfdec440a48 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.433118] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1852.433375] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b4418fde-11fa-4fe2-8163-6c18dd631958 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.435362] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1852.435536] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Folder [datastore2] 
devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1852.436500] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eebf6d64-8563-40c8-aa54-3f92c5445214 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.440985] env[62740]: DEBUG oslo_vmware.api [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Waiting for the task: (returnval){ [ 1852.440985] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5259fc1e-5cbf-af48-0187-31ec603e1111" [ 1852.440985] env[62740]: _type = "Task" [ 1852.440985] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1852.451051] env[62740]: DEBUG oslo_vmware.api [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5259fc1e-5cbf-af48-0187-31ec603e1111, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.501046] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1852.501312] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1852.501499] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Deleting the datastore file [datastore2] 388d71f2-b229-4666-a53d-d5b07e498eed {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1852.501771] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-81d071fd-7d73-49f5-8b8a-a44cd147cd20 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.507563] env[62740]: DEBUG oslo_vmware.api [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Waiting for the task: (returnval){ [ 1852.507563] env[62740]: value = "task-640327" [ 1852.507563] env[62740]: _type = "Task" [ 1852.507563] env[62740]: } to complete. 
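The (returnval){ value = "task-640327" _type = "Task" } blocks printed while waiting are managed object references: an opaque server-side identifier plus its vSphere type. A rough stand-in for reading the log (dataclass invented purely for illustration):

    from dataclasses import dataclass

    @dataclass
    class ManagedObjectReference:
        value: str   # e.g. "task-640327"
        _type: str   # e.g. "Task" or "ClusterComputeResource"

    task_ref = ManagedObjectReference(value='task-640327', _type='Task')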
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1852.516505] env[62740]: DEBUG oslo_vmware.api [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Task: {'id': task-640327, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.952110] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1852.952377] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Creating directory with path [datastore2] vmware_temp/29b7cfee-76e8-4325-abcb-00828a99c9f6/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1852.952611] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-305b62f3-3908-4879-a000-337cc2bc462f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.964062] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Created directory with path [datastore2] vmware_temp/29b7cfee-76e8-4325-abcb-00828a99c9f6/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1852.964062] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Fetch image to [datastore2] vmware_temp/29b7cfee-76e8-4325-abcb-00828a99c9f6/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1852.964062] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/29b7cfee-76e8-4325-abcb-00828a99c9f6/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1852.964311] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ff75c13-f026-488f-a47d-e8860e7fa089 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.970558] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64ec2518-796c-4bef-b708-5837fea1837f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.979340] env[62740]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9743e51-01d7-4793-b757-9bbc84b3345f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.011550] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10742786-af41-4254-9764-5b8612fe574b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.018089] env[62740]: DEBUG oslo_vmware.api [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Task: {'id': task-640327, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.066555} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1853.019424] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1853.019615] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1853.019792] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1853.019969] env[62740]: INFO nova.compute.manager [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Took 0.59 seconds to destroy the instance on the hypervisor. 
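
The entries above exercise the driver's task pattern twice: a SearchDatastore_Task is polled at "progress is 0%", and the DeleteDatastoreFile_Task completes with a reported duration_secs of 0.066555. The loop below is a minimal, stdlib-only sketch of that poll-until-done behaviour; the function name and the dict shape are illustrative assumptions, not the oslo.vmware API.

    import time

    class TaskTimeout(Exception):
        """Raised when a task does not finish within the allotted time."""

    def wait_for_task(poll_fn, interval=0.5, timeout=60.0):
        # poll_fn is assumed to return a dict such as
        #   {'state': 'running', 'progress': 0}
        #   {'state': 'success', 'duration_secs': 0.066555}
        #   {'state': 'error', 'error': 'InvalidArgument'}
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = poll_fn()
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                # The real layer translates the fault before raising; the
                # effect is the same: the caller sees the task's error.
                raise RuntimeError(info.get('error', 'task failed'))
            time.sleep(interval)  # between polls, "... progress is N%" is logged
        raise TaskTimeout('task did not complete within %.1fs' % timeout)

It is this polling layer that, in the traceback further down, converts the vCenter "InvalidArgument: fileType" fault into the VimFaultException that aborts the spawn.
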
[ 1853.022087] env[62740]: DEBUG nova.compute.claims [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1853.022309] env[62740]: DEBUG oslo_concurrency.lockutils [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.022550] env[62740]: DEBUG oslo_concurrency.lockutils [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1853.024695] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Expecting reply to msg 5f2b86e5c02845339ceb1a7509c9b863 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1853.025604] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a4c89350-dcc7-4360-bf58-5f68fe312263 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.045791] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1853.062718] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5f2b86e5c02845339ceb1a7509c9b863 [ 1853.199175] env[62740]: DEBUG oslo_vmware.rw_handles [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/29b7cfee-76e8-4325-abcb-00828a99c9f6/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1853.260603] env[62740]: DEBUG oslo_vmware.rw_handles [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Completed reading data from the image iterator. 
{{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1853.260841] env[62740]: DEBUG oslo_vmware.rw_handles [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/29b7cfee-76e8-4325-abcb-00828a99c9f6/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1853.293849] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b38968a7-2b48-40a0-b5cb-df1dc698cfab {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.301743] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14178e78-5d29-49d3-9740-6f26468c0f0e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.332169] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81311566-8d2f-4ca3-8a15-d851962a77c7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.339409] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-264542a1-faa6-442a-aa54-3ab3f028ee6c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.352418] env[62740]: DEBUG nova.compute.provider_tree [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1853.352988] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Expecting reply to msg 25969c4450c748c6bfcf4d6efb53738a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1853.361029] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 25969c4450c748c6bfcf4d6efb53738a [ 1853.362073] env[62740]: DEBUG nova.scheduler.client.report [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1853.364531] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 
tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Expecting reply to msg 3feea02b3bf948c3884379c41bfb2356 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1853.375363] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3feea02b3bf948c3884379c41bfb2356 [ 1853.376060] env[62740]: DEBUG oslo_concurrency.lockutils [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.353s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1853.376580] env[62740]: ERROR nova.compute.manager [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1853.376580] env[62740]: Faults: ['InvalidArgument'] [ 1853.376580] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Traceback (most recent call last): [ 1853.376580] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1853.376580] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] self.driver.spawn(context, instance, image_meta, [ 1853.376580] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1853.376580] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1853.376580] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1853.376580] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] self._fetch_image_if_missing(context, vi) [ 1853.376580] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1853.376580] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] image_cache(vi, tmp_image_ds_loc) [ 1853.376580] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1853.376580] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] vm_util.copy_virtual_disk( [ 1853.376580] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1853.376580] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] session._wait_for_task(vmdk_copy_task) [ 1853.376580] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1853.376580] env[62740]: ERROR 
nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] return self.wait_for_task(task_ref) [ 1853.376580] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1853.376580] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] return evt.wait() [ 1853.376580] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1853.376580] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] result = hub.switch() [ 1853.376580] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1853.376580] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] return self.greenlet.switch() [ 1853.376580] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1853.376580] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] self.f(*self.args, **self.kw) [ 1853.376580] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1853.376580] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] raise exceptions.translate_fault(task_info.error) [ 1853.376580] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1853.376580] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Faults: ['InvalidArgument'] [ 1853.376580] env[62740]: ERROR nova.compute.manager [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] [ 1853.377735] env[62740]: DEBUG nova.compute.utils [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1853.378734] env[62740]: DEBUG nova.compute.manager [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Build of instance 388d71f2-b229-4666-a53d-d5b07e498eed was re-scheduled: A specified parameter was not correct: fileType [ 1853.378734] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1853.379126] env[62740]: DEBUG nova.compute.manager [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1853.379303] env[62740]: DEBUG nova.compute.manager 
[None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1853.379475] env[62740]: DEBUG nova.compute.manager [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1853.379641] env[62740]: DEBUG nova.network.neutron [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1853.868413] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Expecting reply to msg ac2da484d3e64839a720ca9c68996b8e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1853.888534] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac2da484d3e64839a720ca9c68996b8e [ 1853.889395] env[62740]: DEBUG nova.network.neutron [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1853.890109] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Expecting reply to msg bd329e2ac28e4dae89895ab379fa3113 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1853.902085] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bd329e2ac28e4dae89895ab379fa3113 [ 1853.902731] env[62740]: INFO nova.compute.manager [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Took 0.52 seconds to deallocate network for instance. 
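
The traceback and the entries that follow trace the standard build-failure path: spawn raises, the resource claim is aborted under the "compute_resources" lock, networking is deallocated, and the build is handed back for re-scheduling. A compressed sketch of that control flow, with hypothetical names rather than Nova's actual signatures:

    class RescheduledException(Exception):
        """Signals that the build should be retried on another host."""

    def build_and_run_instance(driver, claim, network, instance):
        try:
            driver.spawn(instance)  # raises VimFaultException in the log above
        except Exception as exc:
            # "Aborting claim" -- releases the resources reserved for the
            # instance under the "compute_resources" lock.
            claim.abort()
            # "Deallocating network for instance" / deallocate_for_instance()
            network.deallocate_for_instance(instance)
            # Reported as "Build of instance ... was re-scheduled".
            raise RescheduledException(str(exc)) from exc
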
[ 1853.905519] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Expecting reply to msg 92c4bd3209774e0cb53c874a163a03e6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1853.944137] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 92c4bd3209774e0cb53c874a163a03e6 [ 1853.948033] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Expecting reply to msg 311c5502f5564bbb8a5b5977969445cc in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1853.982349] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 311c5502f5564bbb8a5b5977969445cc [ 1854.009014] env[62740]: INFO nova.scheduler.client.report [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Deleted allocations for instance 388d71f2-b229-4666-a53d-d5b07e498eed [ 1854.020852] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Expecting reply to msg 866c1f303cb443cfb3335d70c67dc7ca in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1854.048443] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 866c1f303cb443cfb3335d70c67dc7ca [ 1854.052396] env[62740]: DEBUG oslo_concurrency.lockutils [None req-97c22c01-6a47-48fd-88ac-2e74fcc93dc8 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Lock "388d71f2-b229-4666-a53d-d5b07e498eed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 609.611s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1854.052396] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-005425bd-3467-464a-8f5d-3242349a1bc5 tempest-ServersAaction247Test-424946737 tempest-ServersAaction247Test-424946737-project-member] Expecting reply to msg 80c8545fcb084e6fbfd6f48173e3c1e9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1854.052396] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c5df9f74-aa14-47d0-9db1-90cef485b440 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Lock "388d71f2-b229-4666-a53d-d5b07e498eed" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 413.082s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1854.052396] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c5df9f74-aa14-47d0-9db1-90cef485b440 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Acquiring lock "388d71f2-b229-4666-a53d-d5b07e498eed-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1854.052396] env[62740]: DEBUG oslo_concurrency.lockutils [None 
req-c5df9f74-aa14-47d0-9db1-90cef485b440 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Lock "388d71f2-b229-4666-a53d-d5b07e498eed-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1854.052396] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c5df9f74-aa14-47d0-9db1-90cef485b440 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Lock "388d71f2-b229-4666-a53d-d5b07e498eed-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1854.052876] env[62740]: INFO nova.compute.manager [None req-c5df9f74-aa14-47d0-9db1-90cef485b440 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Terminating instance [ 1854.056742] env[62740]: DEBUG nova.compute.manager [None req-c5df9f74-aa14-47d0-9db1-90cef485b440 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1854.056948] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-c5df9f74-aa14-47d0-9db1-90cef485b440 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1854.057222] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-512d0df9-0557-4735-a8eb-0ff3c7ea9ae8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.059808] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 80c8545fcb084e6fbfd6f48173e3c1e9 [ 1854.060237] env[62740]: DEBUG nova.compute.manager [None req-005425bd-3467-464a-8f5d-3242349a1bc5 tempest-ServersAaction247Test-424946737 tempest-ServersAaction247Test-424946737-project-member] [instance: 08197ee6-55de-40f8-8704-641c0614cad6] Starting instance... 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1854.061805] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-005425bd-3467-464a-8f5d-3242349a1bc5 tempest-ServersAaction247Test-424946737 tempest-ServersAaction247Test-424946737-project-member] Expecting reply to msg 5b78decd91d448c38b8830cc7c52efb0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1854.069837] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb19bd5c-8716-44d6-be80-8fec2e19dadb {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.084153] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5b78decd91d448c38b8830cc7c52efb0 [ 1854.084779] env[62740]: DEBUG nova.compute.manager [None req-005425bd-3467-464a-8f5d-3242349a1bc5 tempest-ServersAaction247Test-424946737 tempest-ServersAaction247Test-424946737-project-member] [instance: 08197ee6-55de-40f8-8704-641c0614cad6] Instance disappeared before build. {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1854.085240] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-005425bd-3467-464a-8f5d-3242349a1bc5 tempest-ServersAaction247Test-424946737 tempest-ServersAaction247Test-424946737-project-member] Expecting reply to msg e4f9688d43a847b7bb397261cbd4cc78 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1854.100597] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-c5df9f74-aa14-47d0-9db1-90cef485b440 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 388d71f2-b229-4666-a53d-d5b07e498eed could not be found. [ 1854.100790] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-c5df9f74-aa14-47d0-9db1-90cef485b440 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1854.100969] env[62740]: INFO nova.compute.manager [None req-c5df9f74-aa14-47d0-9db1-90cef485b440 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1854.101248] env[62740]: DEBUG oslo.service.loopingcall [None req-c5df9f74-aa14-47d0-9db1-90cef485b440 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1854.101469] env[62740]: DEBUG nova.compute.manager [-] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1854.101567] env[62740]: DEBUG nova.network.neutron [-] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1854.103929] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e4f9688d43a847b7bb397261cbd4cc78 [ 1854.117184] env[62740]: DEBUG oslo_concurrency.lockutils [None req-005425bd-3467-464a-8f5d-3242349a1bc5 tempest-ServersAaction247Test-424946737 tempest-ServersAaction247Test-424946737-project-member] Lock "08197ee6-55de-40f8-8704-641c0614cad6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 214.720s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1854.117790] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Expecting reply to msg 4fe95a950deb4af397d4afe9d9034014 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1854.120882] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 43bd0831b70143a8ac1389d591109115 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1854.126529] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 43bd0831b70143a8ac1389d591109115 [ 1854.126853] env[62740]: DEBUG nova.network.neutron [-] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1854.127225] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ea54897b9d7f40159d20135ac982e535 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1854.132037] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4fe95a950deb4af397d4afe9d9034014 [ 1854.132479] env[62740]: DEBUG nova.compute.manager [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1854.134415] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Expecting reply to msg 2ec0c260102b4bea8128c37bbc4b5f4a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1854.135419] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea54897b9d7f40159d20135ac982e535 [ 1854.135836] env[62740]: INFO nova.compute.manager [-] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] Took 0.03 seconds to deallocate network for instance. 
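
The inventory dictionaries reported for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0, and the "Claim successful" entries just below, follow placement's capacity rule: usable capacity per resource class is (total - reserved) * allocation_ratio. A quick check of that arithmetic against the logged figures:

    def capacity(total, reserved, allocation_ratio):
        # Effective schedulable capacity for one resource class.
        return int((total - reserved) * allocation_ratio)

    assert capacity(48, 0, 4.0) == 192           # VCPU: 48 cores, 4x oversubscribed
    assert capacity(196590, 512, 1.0) == 196078  # MEMORY_MB
    assert capacity(400, 0, 1.0) == 400          # DISK_GB

max_unit additionally caps any single allocation (16 vCPUs, 65530 MB, 90 GB here), so a request can be rejected even when aggregate capacity remains free.
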
[ 1854.139240] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c5df9f74-aa14-47d0-9db1-90cef485b440 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Expecting reply to msg 27a3f4f8f80d416488cc00aba169a622 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1854.161773] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ec0c260102b4bea8128c37bbc4b5f4a [ 1854.167624] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 27a3f4f8f80d416488cc00aba169a622 [ 1854.176323] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1854.176534] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1854.177980] env[62740]: INFO nova.compute.claims [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1854.179553] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Expecting reply to msg 7ebd8eb79bba4d7d8772fb075ecfc4fa in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1854.183361] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c5df9f74-aa14-47d0-9db1-90cef485b440 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Expecting reply to msg adb59d0c3aad4454a74cf6bf4e5e197b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1854.211698] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ebd8eb79bba4d7d8772fb075ecfc4fa [ 1854.213410] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Expecting reply to msg 406ca191e4a34d12b2834f85c261a18c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1854.221890] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg adb59d0c3aad4454a74cf6bf4e5e197b [ 1854.222696] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 406ca191e4a34d12b2834f85c261a18c [ 1854.227444] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c5df9f74-aa14-47d0-9db1-90cef485b440 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Lock "388d71f2-b229-4666-a53d-d5b07e498eed" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.177s {{(pid=62740) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1854.227751] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c5df9f74-aa14-47d0-9db1-90cef485b440 tempest-AttachInterfacesUnderV243Test-1765719672 tempest-AttachInterfacesUnderV243Test-1765719672-project-member] Expecting reply to msg 9063fefaaf974ce5a59aa42c8458bedb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1854.228765] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "388d71f2-b229-4666-a53d-d5b07e498eed" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 44.982s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1854.228950] env[62740]: INFO nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 388d71f2-b229-4666-a53d-d5b07e498eed] During sync_power_state the instance has a pending task (deleting). Skip. [ 1854.229131] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "388d71f2-b229-4666-a53d-d5b07e498eed" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1854.238123] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9063fefaaf974ce5a59aa42c8458bedb [ 1854.373153] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc0c3a98-0c34-49a2-95d8-da26740d12ab {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.381154] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3948ee95-5011-418a-b634-f7c9fa7e4961 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.409957] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-125c389e-2ebe-4c32-a801-8c04f501c7bb {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.417169] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c144e64f-b0e8-4a9f-a0d3-78e0ccf47dd7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.429903] env[62740]: DEBUG nova.compute.provider_tree [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1854.430403] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Expecting reply to msg e60d9e3c3cdb48e09ed4ef28b964c20f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1854.437358] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e60d9e3c3cdb48e09ed4ef28b964c20f [ 1854.438228] env[62740]: DEBUG nova.scheduler.client.report [None 
req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1854.440514] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Expecting reply to msg 85273650a2dd43dd855042944069624f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1854.451337] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 85273650a2dd43dd855042944069624f [ 1854.451978] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.275s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1854.452435] env[62740]: DEBUG nova.compute.manager [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Start building networks asynchronously for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1854.454005] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Expecting reply to msg c7a0c71adc504beb8e8d404155fc00ec in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1854.481701] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c7a0c71adc504beb8e8d404155fc00ec [ 1854.484838] env[62740]: DEBUG nova.compute.utils [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1854.485499] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Expecting reply to msg 5edc0d7d3957439199a699fab1491551 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1854.486564] env[62740]: DEBUG nova.compute.manager [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Allocating IP information in the background. 
{{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1854.486860] env[62740]: DEBUG nova.network.neutron [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1854.501879] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5edc0d7d3957439199a699fab1491551 [ 1854.502494] env[62740]: DEBUG nova.compute.manager [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Start building block device mappings for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1854.504260] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Expecting reply to msg 239ab16e95444a11a70ce379d0b9494e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1854.545167] env[62740]: DEBUG nova.policy [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f5ffacac77ac4eb7bdf39ad844d7e9fa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5ed84bed92114277a01823c6efe27eb1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 1854.546989] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 239ab16e95444a11a70ce379d0b9494e [ 1854.549864] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Expecting reply to msg 85eb9e2cb3084425bf8943ecb5e9b36d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1854.577436] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 85eb9e2cb3084425bf8943ecb5e9b36d [ 1854.578537] env[62740]: DEBUG nova.compute.manager [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Start spawning the instance on the hypervisor. 
{{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1854.604246] env[62740]: DEBUG nova.virt.hardware [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1854.604455] env[62740]: DEBUG nova.virt.hardware [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1854.604618] env[62740]: DEBUG nova.virt.hardware [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1854.604803] env[62740]: DEBUG nova.virt.hardware [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1854.604956] env[62740]: DEBUG nova.virt.hardware [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1854.605115] env[62740]: DEBUG nova.virt.hardware [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1854.605320] env[62740]: DEBUG nova.virt.hardware [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1854.605479] env[62740]: DEBUG nova.virt.hardware [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1854.605646] env[62740]: DEBUG nova.virt.hardware [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 
tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1854.605809] env[62740]: DEBUG nova.virt.hardware [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1854.605982] env[62740]: DEBUG nova.virt.hardware [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1854.606933] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f109ea84-f24b-459c-97a8-755575acca67 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.614713] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05383fe6-9c01-40cf-8731-b8f17155d4ca {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.879416] env[62740]: DEBUG nova.network.neutron [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Successfully created port: f622f4b0-13e8-4397-b6c6-3012ce0124d6 {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1855.633453] env[62740]: DEBUG nova.network.neutron [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Successfully updated port: f622f4b0-13e8-4397-b6c6-3012ce0124d6 {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1855.633453] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Expecting reply to msg 43c87b475faa4a0ca78c638f4b0582f6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1855.642572] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 43c87b475faa4a0ca78c638f4b0582f6 [ 1855.643435] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Acquiring lock "refresh_cache-19f27c16-45b3-47d8-acf0-18255844431f" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1855.643435] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Acquired lock "refresh_cache-19f27c16-45b3-47d8-acf0-18255844431f" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1855.643435] env[62740]: DEBUG nova.network.neutron [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 
tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1855.643584] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Expecting reply to msg c6ef7c891d034502a59fcc5d3436edeb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1855.651744] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c6ef7c891d034502a59fcc5d3436edeb [ 1855.684182] env[62740]: DEBUG nova.network.neutron [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1855.858989] env[62740]: DEBUG nova.network.neutron [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Updating instance_info_cache with network_info: [{"id": "f622f4b0-13e8-4397-b6c6-3012ce0124d6", "address": "fa:16:3e:d3:84:6b", "network": {"id": "f4ce482d-48ef-4299-8202-454d9887f872", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-281381620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ed84bed92114277a01823c6efe27eb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2f5e5e2-e460-49ce-aa24-232e4a8007af", "external-id": "nsx-vlan-transportzone-503", "segmentation_id": 503, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf622f4b0-13", "ovs_interfaceid": "f622f4b0-13e8-4397-b6c6-3012ce0124d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1855.859511] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Expecting reply to msg 345722135416481ca54816e7090512b7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1855.869940] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 345722135416481ca54816e7090512b7 [ 1855.870482] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Releasing lock "refresh_cache-19f27c16-45b3-47d8-acf0-18255844431f" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1855.870749] env[62740]: DEBUG nova.compute.manager [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 
tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Instance network_info: |[{"id": "f622f4b0-13e8-4397-b6c6-3012ce0124d6", "address": "fa:16:3e:d3:84:6b", "network": {"id": "f4ce482d-48ef-4299-8202-454d9887f872", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-281381620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ed84bed92114277a01823c6efe27eb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2f5e5e2-e460-49ce-aa24-232e4a8007af", "external-id": "nsx-vlan-transportzone-503", "segmentation_id": 503, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf622f4b0-13", "ovs_interfaceid": "f622f4b0-13e8-4397-b6c6-3012ce0124d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1855.871190] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:84:6b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd2f5e5e2-e460-49ce-aa24-232e4a8007af', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f622f4b0-13e8-4397-b6c6-3012ce0124d6', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1855.878611] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Creating folder: Project (5ed84bed92114277a01823c6efe27eb1). Parent ref: group-v156037. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1855.879131] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ec590516-9219-4735-b306-2babc1c03985 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.891318] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Created folder: Project (5ed84bed92114277a01823c6efe27eb1) in parent group-v156037. [ 1855.891486] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Creating folder: Instances. Parent ref: group-v156176. 
{{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1855.891685] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7b3e6a14-a6fb-4f37-82aa-472c93af202b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.899399] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Created folder: Instances in parent group-v156176. [ 1855.899614] env[62740]: DEBUG oslo.service.loopingcall [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1855.899825] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1855.900066] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-acc60e44-3247-4c21-881e-b5693968a29f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.919440] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1855.919440] env[62740]: value = "task-640330" [ 1855.919440] env[62740]: _type = "Task" [ 1855.919440] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.924119] env[62740]: DEBUG nova.compute.manager [req-84d3d844-f6e9-4707-af67-4e437cbb4b47 req-52f1193e-7261-4ff4-81c4-a2fb9bccd0f6 service nova] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Received event network-vif-plugged-f622f4b0-13e8-4397-b6c6-3012ce0124d6 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1855.924312] env[62740]: DEBUG oslo_concurrency.lockutils [req-84d3d844-f6e9-4707-af67-4e437cbb4b47 req-52f1193e-7261-4ff4-81c4-a2fb9bccd0f6 service nova] Acquiring lock "19f27c16-45b3-47d8-acf0-18255844431f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.924518] env[62740]: DEBUG oslo_concurrency.lockutils [req-84d3d844-f6e9-4707-af67-4e437cbb4b47 req-52f1193e-7261-4ff4-81c4-a2fb9bccd0f6 service nova] Lock "19f27c16-45b3-47d8-acf0-18255844431f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1855.924686] env[62740]: DEBUG oslo_concurrency.lockutils [req-84d3d844-f6e9-4707-af67-4e437cbb4b47 req-52f1193e-7261-4ff4-81c4-a2fb9bccd0f6 service nova] Lock "19f27c16-45b3-47d8-acf0-18255844431f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1855.924855] env[62740]: DEBUG nova.compute.manager [req-84d3d844-f6e9-4707-af67-4e437cbb4b47 req-52f1193e-7261-4ff4-81c4-a2fb9bccd0f6 service nova] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] No waiting events found 
dispatching network-vif-plugged-f622f4b0-13e8-4397-b6c6-3012ce0124d6 {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1855.925029] env[62740]: WARNING nova.compute.manager [req-84d3d844-f6e9-4707-af67-4e437cbb4b47 req-52f1193e-7261-4ff4-81c4-a2fb9bccd0f6 service nova] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Received unexpected event network-vif-plugged-f622f4b0-13e8-4397-b6c6-3012ce0124d6 for instance with vm_state building and task_state spawning. [ 1855.925198] env[62740]: DEBUG nova.compute.manager [req-84d3d844-f6e9-4707-af67-4e437cbb4b47 req-52f1193e-7261-4ff4-81c4-a2fb9bccd0f6 service nova] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Received event network-changed-f622f4b0-13e8-4397-b6c6-3012ce0124d6 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1855.925352] env[62740]: DEBUG nova.compute.manager [req-84d3d844-f6e9-4707-af67-4e437cbb4b47 req-52f1193e-7261-4ff4-81c4-a2fb9bccd0f6 service nova] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Refreshing instance network info cache due to event network-changed-f622f4b0-13e8-4397-b6c6-3012ce0124d6. {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1855.925539] env[62740]: DEBUG oslo_concurrency.lockutils [req-84d3d844-f6e9-4707-af67-4e437cbb4b47 req-52f1193e-7261-4ff4-81c4-a2fb9bccd0f6 service nova] Acquiring lock "refresh_cache-19f27c16-45b3-47d8-acf0-18255844431f" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1855.925675] env[62740]: DEBUG oslo_concurrency.lockutils [req-84d3d844-f6e9-4707-af67-4e437cbb4b47 req-52f1193e-7261-4ff4-81c4-a2fb9bccd0f6 service nova] Acquired lock "refresh_cache-19f27c16-45b3-47d8-acf0-18255844431f" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1855.925855] env[62740]: DEBUG nova.network.neutron [req-84d3d844-f6e9-4707-af67-4e437cbb4b47 req-52f1193e-7261-4ff4-81c4-a2fb9bccd0f6 service nova] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Refreshing network info cache for port f622f4b0-13e8-4397-b6c6-3012ce0124d6 {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1855.926340] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-84d3d844-f6e9-4707-af67-4e437cbb4b47 req-52f1193e-7261-4ff4-81c4-a2fb9bccd0f6 service nova] Expecting reply to msg 11c6f08f974b41cab098cbddbae10bbf in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1855.932508] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640330, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.933810] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 11c6f08f974b41cab098cbddbae10bbf [ 1856.429796] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640330, 'name': CreateVM_Task, 'duration_secs': 0.283201} completed successfully. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.433141] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1856.433141] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1856.433141] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1856.433141] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1856.433141] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7664d4ab-3b58-4559-abf9-b8e8d565070e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.437131] env[62740]: DEBUG oslo_vmware.api [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Waiting for the task: (returnval){ [ 1856.437131] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]522ad7b9-e458-6e16-8678-c608eebb10b4" [ 1856.437131] env[62740]: _type = "Task" [ 1856.437131] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.446868] env[62740]: DEBUG oslo_vmware.api [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]522ad7b9-e458-6e16-8678-c608eebb10b4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.452835] env[62740]: DEBUG nova.network.neutron [req-84d3d844-f6e9-4707-af67-4e437cbb4b47 req-52f1193e-7261-4ff4-81c4-a2fb9bccd0f6 service nova] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Updated VIF entry in instance network info cache for port f622f4b0-13e8-4397-b6c6-3012ce0124d6. 
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1856.453310] env[62740]: DEBUG nova.network.neutron [req-84d3d844-f6e9-4707-af67-4e437cbb4b47 req-52f1193e-7261-4ff4-81c4-a2fb9bccd0f6 service nova] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Updating instance_info_cache with network_info: [{"id": "f622f4b0-13e8-4397-b6c6-3012ce0124d6", "address": "fa:16:3e:d3:84:6b", "network": {"id": "f4ce482d-48ef-4299-8202-454d9887f872", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-281381620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ed84bed92114277a01823c6efe27eb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2f5e5e2-e460-49ce-aa24-232e4a8007af", "external-id": "nsx-vlan-transportzone-503", "segmentation_id": 503, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf622f4b0-13", "ovs_interfaceid": "f622f4b0-13e8-4397-b6c6-3012ce0124d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1856.454161] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-84d3d844-f6e9-4707-af67-4e437cbb4b47 req-52f1193e-7261-4ff4-81c4-a2fb9bccd0f6 service nova] Expecting reply to msg cb40a6e161b04c7fa6d566bd9357523a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1856.463241] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cb40a6e161b04c7fa6d566bd9357523a [ 1856.463852] env[62740]: DEBUG oslo_concurrency.lockutils [req-84d3d844-f6e9-4707-af67-4e437cbb4b47 req-52f1193e-7261-4ff4-81c4-a2fb9bccd0f6 service nova] Releasing lock "refresh_cache-19f27c16-45b3-47d8-acf0-18255844431f" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1856.949948] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1856.949948] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1856.949948] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1859.938846] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1859.939133] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1860.890884] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1860.891092] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Starting heal instance info cache {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 1860.891178] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Rebuilding the list of instances to heal {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 1860.891770] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg bc9fcdcb08144af0934ccdadab384e55 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1860.912364] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc9fcdcb08144af0934ccdadab384e55 [ 1860.914628] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1860.914781] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1860.914914] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1860.915171] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1860.915345] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Skipping network cache update for instance because it is Building. 
{{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1860.915476] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1860.915600] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1860.915719] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1860.915840] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1860.915957] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1860.916087] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Didn't find any instances for network info cache update. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 1862.890823] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1862.891119] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1862.891275] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1862.891422] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62740) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 1864.689320] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2f4ba341-ad6c-439b-8b8d-aea5ded68742 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Expecting reply to msg 409672f480054b099d62b9a059658161 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1864.703412] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 409672f480054b099d62b9a059658161 [ 1864.703912] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2f4ba341-ad6c-439b-8b8d-aea5ded68742 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Acquiring lock "19f27c16-45b3-47d8-acf0-18255844431f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1864.890467] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager.update_available_resource {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1864.890847] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 40cec01a0baf4dd393f44b2b3ba30ed7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1864.901928] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 40cec01a0baf4dd393f44b2b3ba30ed7 [ 1864.903009] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1864.903246] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1864.903418] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1864.903578] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62740) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1864.905448] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31dd7c32-4b3d-4b65-8a51-520c3497450e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.913830] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82e8507f-7306-4f74-b561-9dec84b6bda0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.927447] 
env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8151fd7f-35b7-480c-8cad-d2963e1358eb {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.933567] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e117b93-d4d3-4946-a67b-fc484e7b92d3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.961652] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181687MB free_disk=90GB free_vcpus=48 pci_devices=None {{(pid=62740) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1864.961808] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1864.962020] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1864.962847] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg d3e81d3d364f42ba9c439d1bac7cb7e1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1864.997314] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d3e81d3d364f42ba9c439d1bac7cb7e1 [ 1865.001674] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 01e75542ced44bc9ae97d48ecdf4f023 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1865.011213] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 01e75542ced44bc9ae97d48ecdf4f023 [ 1865.033700] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance c0daf074-eecb-4899-938f-477031efc6d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1865.033859] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 61fea037-aac3-47ef-aa6a-5dfa657d840d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1865.033990] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 86c57375-8328-4344-b228-2f1ce6efc71e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1865.034134] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance a41506d2-33b2-40b8-badb-41312c7abbd2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1865.034256] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 3aa2858e-d422-408a-a83a-98382f971add actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1865.034374] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1865.034776] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1865.034776] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 9a595940-16ba-401a-922f-331cf87093c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1865.034776] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance d2fb90b7-1618-4f07-8854-81566887a7cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1865.034875] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 19f27c16-45b3-47d8-acf0-18255844431f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1865.035371] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg a64ee09b9d894c15bd81c49f8be3c236 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1865.045954] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a64ee09b9d894c15bd81c49f8be3c236 [ 1865.046772] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 4ee71d81-7d8b-42f8-a27c-b4645169fa3e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1865.047279] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 02cefa157090494db9727ad6db768da6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1865.057029] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 02cefa157090494db9727ad6db768da6 [ 1865.057231] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1865.057706] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg ea3c611565ac40c4b67fe4160b51e272 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1865.066862] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea3c611565ac40c4b67fe4160b51e272 [ 1865.067522] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 1b975b29-fbaa-4385-9bf9-33496b4ed129 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1865.067779] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1865.067934] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1865.215614] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9abfdfd-7ff5-4fc3-88f8-f36e5372ad06 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.223868] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85781304-5583-452c-a2c7-44014bc5f765 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.252397] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c7c7a94-6db0-4c64-ac12-8be3f7d6ed21 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.258999] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b056adca-867f-41f8-9cec-5abca8247e1a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.271469] env[62740]: DEBUG nova.compute.provider_tree [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1865.271925] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 0b71bf5577064e2393a6c8c8886edcc9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1865.280701] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0b71bf5577064e2393a6c8c8886edcc9 [ 1865.281605] env[62740]: DEBUG nova.scheduler.client.report [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1865.283886] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 98af4baabc4d4a1a90a50793ca0049b0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1865.297482] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 98af4baabc4d4a1a90a50793ca0049b0 [ 
1865.298151] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62740) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1865.298424] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.336s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1866.298932] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1866.891360] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1870.888217] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1870.889029] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 4dc7e20be3524156bc1cd9d9cac2bf72 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1870.906472] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4dc7e20be3524156bc1cd9d9cac2bf72 [ 1902.532775] env[62740]: WARNING oslo_vmware.rw_handles [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1902.532775] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1902.532775] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1902.532775] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1902.532775] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1902.532775] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 1902.532775] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1902.532775] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1902.532775] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1902.532775] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1902.532775] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1902.532775] env[62740]: ERROR oslo_vmware.rw_handles [ 1902.533431] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 
tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/29b7cfee-76e8-4325-abcb-00828a99c9f6/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1902.535843] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1902.536177] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Copying Virtual Disk [datastore2] vmware_temp/29b7cfee-76e8-4325-abcb-00828a99c9f6/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore2] vmware_temp/29b7cfee-76e8-4325-abcb-00828a99c9f6/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1902.536543] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f0031f12-0c10-4188-aef9-f36504aa6f47 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.547138] env[62740]: DEBUG oslo_vmware.api [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Waiting for the task: (returnval){ [ 1902.547138] env[62740]: value = "task-640331" [ 1902.547138] env[62740]: _type = "Task" [ 1902.547138] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1902.554465] env[62740]: DEBUG oslo_vmware.api [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Task: {'id': task-640331, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.057125] env[62740]: DEBUG oslo_vmware.exceptions [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Fault InvalidArgument not matched. 
{{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1903.057372] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1903.057961] env[62740]: ERROR nova.compute.manager [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1903.057961] env[62740]: Faults: ['InvalidArgument'] [ 1903.057961] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] Traceback (most recent call last): [ 1903.057961] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1903.057961] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] yield resources [ 1903.057961] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1903.057961] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] self.driver.spawn(context, instance, image_meta, [ 1903.057961] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1903.057961] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1903.057961] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1903.057961] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] self._fetch_image_if_missing(context, vi) [ 1903.057961] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1903.057961] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] image_cache(vi, tmp_image_ds_loc) [ 1903.057961] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1903.057961] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] vm_util.copy_virtual_disk( [ 1903.057961] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1903.057961] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] session._wait_for_task(vmdk_copy_task) [ 1903.057961] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1903.057961] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] return self.wait_for_task(task_ref) [ 1903.057961] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1903.057961] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] return evt.wait() [ 1903.057961] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1903.057961] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] result = hub.switch() [ 1903.057961] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1903.057961] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] return self.greenlet.switch() [ 1903.057961] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1903.057961] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] self.f(*self.args, **self.kw) [ 1903.057961] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1903.057961] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] raise exceptions.translate_fault(task_info.error) [ 1903.057961] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1903.057961] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] Faults: ['InvalidArgument'] [ 1903.057961] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] [ 1903.058951] env[62740]: INFO nova.compute.manager [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Terminating instance [ 1903.060025] env[62740]: DEBUG oslo_concurrency.lockutils [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1903.060135] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1903.060369] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dbffcbfc-415e-43b8-b711-80e7cf531f90 {{(pid=62740) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.062945] env[62740]: DEBUG nova.compute.manager [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1903.062945] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1903.063517] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd65cafd-d02e-40fe-92a6-ca5550e7dc85 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.070383] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1903.070631] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-67b41a1f-b19d-4bb5-ac56-46ec881c6bda {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.072805] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1903.072976] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1903.074029] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b2c3710-8f46-4c85-a7f9-4c091bbd9b68 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.079056] env[62740]: DEBUG oslo_vmware.api [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Waiting for the task: (returnval){ [ 1903.079056] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5269c097-4e7b-ae05-0e72-9fff99c9eeb4" [ 1903.079056] env[62740]: _type = "Task" [ 1903.079056] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1903.086461] env[62740]: DEBUG oslo_vmware.api [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5269c097-4e7b-ae05-0e72-9fff99c9eeb4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.136713] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1903.136713] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1903.137338] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Deleting the datastore file [datastore2] c0daf074-eecb-4899-938f-477031efc6d1 {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1903.137338] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d72c83af-c79b-4e96-b044-6ddd92ffc512 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.143410] env[62740]: DEBUG oslo_vmware.api [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Waiting for the task: (returnval){ [ 1903.143410] env[62740]: value = "task-640333" [ 1903.143410] env[62740]: _type = "Task" [ 1903.143410] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1903.150873] env[62740]: DEBUG oslo_vmware.api [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Task: {'id': task-640333, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.588955] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1903.589337] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Creating directory with path [datastore2] vmware_temp/0848359e-4982-4810-934e-7914d0f02eaf/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1903.589453] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-47f5542a-a258-4a96-813c-c78f244aeb06 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.600417] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Created directory with path [datastore2] vmware_temp/0848359e-4982-4810-934e-7914d0f02eaf/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1903.600598] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Fetch image to [datastore2] vmware_temp/0848359e-4982-4810-934e-7914d0f02eaf/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1903.600779] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/0848359e-4982-4810-934e-7914d0f02eaf/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1903.601492] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a1ddb81-56a7-4a16-b963-6c2befa4078b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.607585] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e413a384-2a7c-47fa-9bbc-899aa351f63c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.616136] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-979b59a7-54f8-4a9d-80f8-21e655ee43a0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.648976] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8aeb0b4-a2b1-451c-ad9c-4b60fc0ab06a {{(pid=62740) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.655545] env[62740]: DEBUG oslo_vmware.api [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Task: {'id': task-640333, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076692} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1903.656925] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1903.657131] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1903.657308] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1903.657484] env[62740]: INFO nova.compute.manager [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Took 0.59 seconds to destroy the instance on the hypervisor. 
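The exchange above is the driver's standard destroy sequence: `VirtualMachine.UnregisterVM`, then a `FileManager.DeleteDatastoreFile_Task` whose returned task is polled by `wait_for_task` (`oslo_vmware/api.py:397`) until it completes ("progress is 0%" ... "completed successfully" with `duration_secs`). Below is a minimal, self-contained sketch of that poll loop under stated assumptions: `TaskInfo`, `get_task_info`, and `fake_task_info_source` are hypothetical stand-ins for the real `PropertyCollector` round trips, not the oslo.vmware API.

```python
import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    state: str                  # 'running', 'success', or 'error'
    progress: int = 0
    error: str | None = None

def fake_task_info_source():
    """Hypothetical stand-in for the PropertyCollector reads the session does."""
    states = iter([TaskInfo('running', 0),
                   TaskInfo('running', 50),
                   TaskInfo('success', 100)])
    return lambda task_ref: next(states)

def wait_for_task(task_ref, get_task_info, interval=0.5):
    """Poll task info until the task leaves 'running', mirroring the
    _poll_task loop: report progress while running, raise on error,
    return the final info on success."""
    while True:
        info = get_task_info(task_ref)
        if info.state == 'running':
            print(f"Task: {task_ref} progress is {info.progress}%.")
            time.sleep(interval)
        elif info.state == 'success':
            print(f"Task: {task_ref} completed successfully.")
            return info
        else:
            raise RuntimeError(info.error or 'task failed')

if __name__ == '__main__':
    wait_for_task('task-640333', fake_task_info_source(), interval=0.01)
```

In the real session the loop body runs inside an oslo.service looping call on an eventlet hub, which is why a failed task's fault later surfaces through `hub.switch()` and `_poll_task` in the `_build_and_run_instance` traceback further down.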
[ 1903.659212] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-46308b8e-34b6-4f81-a7a0-0e339e917e92 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.660998] env[62740]: DEBUG nova.compute.claims [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1903.661184] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1903.661404] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1903.663386] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg c80f86325f66456088b16c4939709f1c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1903.681509] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1903.700886] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c80f86325f66456088b16c4939709f1c [ 1903.784227] env[62740]: DEBUG oslo_concurrency.lockutils [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1903.785821] env[62740]: ERROR nova.compute.manager [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image 174f7655-3fb8-458a-8e9c-108936afe738. 
[ 1903.785821] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Traceback (most recent call last): [ 1903.785821] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1903.785821] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1903.785821] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1903.785821] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] result = getattr(controller, method)(*args, **kwargs) [ 1903.785821] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1903.785821] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] return self._get(image_id) [ 1903.785821] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1903.785821] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1903.785821] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1903.785821] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] resp, body = self.http_client.get(url, headers=header) [ 1903.785821] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1903.785821] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] return self.request(url, 'GET', **kwargs) [ 1903.785821] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1903.785821] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] return self._handle_response(resp) [ 1903.785821] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1903.785821] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] raise exc.from_response(resp, resp.content) [ 1903.785821] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1903.785821] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] [ 1903.785821] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] During handling of the above exception, another exception occurred: [ 1903.785821] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] [ 1903.785821] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Traceback (most recent call last): [ 1903.785821] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1903.785821] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] yield resources [ 1903.785821] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1903.785821] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] self.driver.spawn(context, instance, image_meta, [ 1903.785821] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1903.785821] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1903.785821] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1903.785821] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] self._fetch_image_if_missing(context, vi) [ 1903.785821] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1903.785821] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] image_fetch(context, vi, tmp_image_ds_loc) [ 1903.785821] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1903.785821] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] images.fetch_image( [ 1903.785821] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1903.785821] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] metadata = IMAGE_API.get(context, image_ref) [ 1903.786858] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1903.786858] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] return session.show(context, image_id, [ 1903.786858] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1903.786858] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] _reraise_translated_image_exception(image_id) [ 1903.786858] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1903.786858] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] raise new_exc.with_traceback(exc_trace) [ 1903.786858] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1903.786858] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1903.786858] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1903.786858] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] result = getattr(controller, method)(*args, **kwargs) [ 1903.786858] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1903.786858] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] return self._get(image_id) [ 1903.786858] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1903.786858] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1903.786858] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1903.786858] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] resp, body = self.http_client.get(url, headers=header) [ 1903.786858] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1903.786858] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] return self.request(url, 'GET', **kwargs) [ 1903.786858] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1903.786858] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] return self._handle_response(resp) [ 1903.786858] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1903.786858] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] raise exc.from_response(resp, resp.content) [ 1903.786858] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] nova.exception.ImageNotAuthorized: Not authorized for image 174f7655-3fb8-458a-8e9c-108936afe738. 
[ 1903.786858] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] [ 1903.786858] env[62740]: INFO nova.compute.manager [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Terminating instance [ 1903.787880] env[62740]: DEBUG oslo_concurrency.lockutils [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1903.787951] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1903.788417] env[62740]: DEBUG oslo_concurrency.lockutils [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Acquiring lock "refresh_cache-61fea037-aac3-47ef-aa6a-5dfa657d840d" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1903.788573] env[62740]: DEBUG oslo_concurrency.lockutils [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Acquired lock "refresh_cache-61fea037-aac3-47ef-aa6a-5dfa657d840d" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1903.788769] env[62740]: DEBUG nova.network.neutron [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1903.789239] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Expecting reply to msg d681ff565d054e239acf541e551c5344 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1903.789844] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cbdc9f45-c1c8-4429-aab3-710af481715c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.796628] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d681ff565d054e239acf541e551c5344 [ 1903.800877] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1903.801063] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Folder [datastore2] devstack-image-cache_base 
created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1903.801993] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3201f7f-6b2f-4f9b-bf61-4fc6240f61ad {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.809166] env[62740]: DEBUG oslo_vmware.api [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Waiting for the task: (returnval){ [ 1903.809166] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52743afc-6bdb-ec13-83c8-0c6700fe49f1" [ 1903.809166] env[62740]: _type = "Task" [ 1903.809166] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1903.816997] env[62740]: DEBUG oslo_vmware.api [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52743afc-6bdb-ec13-83c8-0c6700fe49f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.820641] env[62740]: DEBUG nova.network.neutron [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1903.863578] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4047f92c-f612-4c9a-8019-cee349bed784 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.871170] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64d0dff2-c091-43bb-8e83-691aabe1fa11 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.900893] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-686bab93-cf92-4dd8-9a66-34a4d323c453 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.908399] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf383112-5e1e-49af-99cf-4c58796966a7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.924252] env[62740]: DEBUG nova.compute.provider_tree [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1903.924767] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg cedfd2af16ec49b28d65a4698f7e70ee in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1903.934146] env[62740]: DEBUG 
nova.network.neutron [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1903.934614] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Expecting reply to msg 0b3df4976cc944b0a97e0a14b952bce3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1903.938076] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cedfd2af16ec49b28d65a4698f7e70ee [ 1903.939041] env[62740]: DEBUG nova.scheduler.client.report [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1903.941436] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 197a3384638945c8a5c4ed28d1179c31 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1903.943774] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0b3df4976cc944b0a97e0a14b952bce3 [ 1903.944163] env[62740]: DEBUG oslo_concurrency.lockutils [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Releasing lock "refresh_cache-61fea037-aac3-47ef-aa6a-5dfa657d840d" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1903.944536] env[62740]: DEBUG nova.compute.manager [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Start destroying the instance on the hypervisor. 
{{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1903.944722] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1903.945614] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d10970f4-05aa-4fa9-8d3b-124d7ae28862 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.953148] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1903.953528] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-89e4d6b7-5def-4d97-b151-d9e5a20970c2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.955144] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 197a3384638945c8a5c4ed28d1179c31 [ 1903.955928] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.294s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1903.956396] env[62740]: ERROR nova.compute.manager [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1903.956396] env[62740]: Faults: ['InvalidArgument'] [ 1903.956396] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] Traceback (most recent call last): [ 1903.956396] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1903.956396] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] self.driver.spawn(context, instance, image_meta, [ 1903.956396] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1903.956396] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1903.956396] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1903.956396] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] self._fetch_image_if_missing(context, vi) [ 1903.956396] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] 
File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1903.956396] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] image_cache(vi, tmp_image_ds_loc) [ 1903.956396] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1903.956396] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] vm_util.copy_virtual_disk( [ 1903.956396] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1903.956396] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] session._wait_for_task(vmdk_copy_task) [ 1903.956396] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1903.956396] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] return self.wait_for_task(task_ref) [ 1903.956396] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1903.956396] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] return evt.wait() [ 1903.956396] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1903.956396] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] result = hub.switch() [ 1903.956396] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1903.956396] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] return self.greenlet.switch() [ 1903.956396] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1903.956396] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] self.f(*self.args, **self.kw) [ 1903.956396] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1903.956396] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] raise exceptions.translate_fault(task_info.error) [ 1903.956396] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1903.956396] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] Faults: ['InvalidArgument'] [ 1903.956396] env[62740]: ERROR nova.compute.manager [instance: c0daf074-eecb-4899-938f-477031efc6d1] [ 1903.957137] env[62740]: DEBUG nova.compute.utils [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: 
c0daf074-eecb-4899-938f-477031efc6d1] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1903.958909] env[62740]: DEBUG nova.compute.manager [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Build of instance c0daf074-eecb-4899-938f-477031efc6d1 was re-scheduled: A specified parameter was not correct: fileType [ 1903.958909] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1903.959312] env[62740]: DEBUG nova.compute.manager [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1903.959485] env[62740]: DEBUG nova.compute.manager [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1903.959700] env[62740]: DEBUG nova.compute.manager [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1903.959833] env[62740]: DEBUG nova.network.neutron [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1903.986775] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1903.987000] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1903.987201] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Deleting the datastore file [datastore2] 61fea037-aac3-47ef-aa6a-5dfa657d840d {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1903.987452] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-471e6ccf-a3c8-47fb-9c73-d4c515f7996a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.993536] env[62740]: DEBUG 
oslo_vmware.api [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Waiting for the task: (returnval){ [ 1903.993536] env[62740]: value = "task-640335" [ 1903.993536] env[62740]: _type = "Task" [ 1903.993536] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1904.002290] env[62740]: DEBUG oslo_vmware.api [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Task: {'id': task-640335, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.320277] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1904.320549] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Creating directory with path [datastore2] vmware_temp/1b05221f-daa7-4273-87fb-119eff6b910e/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1904.320787] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dcdd54bb-4a96-431e-8400-6679053db6c0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.332526] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Created directory with path [datastore2] vmware_temp/1b05221f-daa7-4273-87fb-119eff6b910e/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1904.332737] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Fetch image to [datastore2] vmware_temp/1b05221f-daa7-4273-87fb-119eff6b910e/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1904.332923] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/1b05221f-daa7-4273-87fb-119eff6b910e/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1904.333687] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fd7b85f-dec6-45fe-8d38-3eb98587ebe3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.340538] env[62740]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa4eed2d-b9e9-4023-8103-c27f09ff9df5 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.350766] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe8216c9-3539-4928-a68a-da86cb194ed0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.382349] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25b10f1e-a502-4b0e-9b6e-00903ec21513 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.385956] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg b55d6788e71c40a8b18f51ca2f0b6868 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1904.390390] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-92a4cb6f-3770-4c34-86bc-a647051f34d0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.396360] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b55d6788e71c40a8b18f51ca2f0b6868 [ 1904.396360] env[62740]: DEBUG nova.network.neutron [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1904.396841] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 3947b818202441998d06b792cf376580 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1904.405883] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3947b818202441998d06b792cf376580 [ 1904.408384] env[62740]: INFO nova.compute.manager [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Took 0.45 seconds to deallocate network for instance. 
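The `ImageNotAuthorized` failure traced above is a translated exception: `nova/image/glance.py` catches the glanceclient `HTTPUnauthorized` and re-raises it as a Nova exception type while keeping the original traceback (`raise new_exc.with_traceback(exc_trace)` at `glance.py:1032` in the traceback), which is why the log shows both exceptions joined by "During handling of the above exception, another exception occurred". A rough sketch of that idiom follows; the two exception classes are stand-ins, not the real glanceclient/Nova ones.

```python
import sys

class HTTPUnauthorized(Exception):
    """Stand-in for glanceclient.exc.HTTPUnauthorized."""

class ImageNotAuthorized(Exception):
    """Stand-in for nova.exception.ImageNotAuthorized."""
    def __init__(self, image_id):
        super().__init__(f"Not authorized for image {image_id}.")

def _reraise_translated_image_exception(image_id):
    """Re-raise the in-flight client error as the service's own type while
    preserving the original traceback -- the idiom visible at glance.py:1032."""
    exc_type, exc_value, exc_trace = sys.exc_info()
    if isinstance(exc_value, HTTPUnauthorized):
        new_exc = ImageNotAuthorized(image_id)
    else:
        new_exc = exc_value           # unknown errors pass through untranslated
    raise new_exc.with_traceback(exc_trace)

def show(image_id):
    try:
        raise HTTPUnauthorized("HTTP 401 Unauthorized")   # simulated glance call
    except Exception:
        _reraise_translated_image_exception(image_id)

if __name__ == '__main__':
    try:
        show("174f7655-3fb8-458a-8e9c-108936afe738")
    except ImageNotAuthorized as exc:
        print(exc)   # Not authorized for image 174f7655-...
```

Because the re-raise happens while the client error is still being handled, Python chains the two automatically; the caller only ever sees the translated type, matching what `_build_resources` logs above.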
[ 1904.411026] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 116cbc569b6a47a989992e4e960abc71 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1904.413789] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1904.447369] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 116cbc569b6a47a989992e4e960abc71 [ 1904.450244] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg e54dea8555da40678487f847cae47da4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1904.471769] env[62740]: DEBUG oslo_vmware.rw_handles [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1b05221f-daa7-4273-87fb-119eff6b910e/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1904.533256] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e54dea8555da40678487f847cae47da4 [ 1904.542782] env[62740]: DEBUG oslo_vmware.rw_handles [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Completed reading data from the image iterator. {{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1904.542973] env[62740]: DEBUG oslo_vmware.rw_handles [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1b05221f-daa7-4273-87fb-119eff6b910e/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1904.548218] env[62740]: DEBUG oslo_vmware.api [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Task: {'id': task-640335, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.040371} completed successfully. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1904.548427] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1904.548618] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1904.548837] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1904.549069] env[62740]: INFO nova.compute.manager [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Took 0.60 seconds to destroy the instance on the hypervisor. [ 1904.549300] env[62740]: DEBUG oslo.service.loopingcall [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1904.549542] env[62740]: DEBUG nova.compute.manager [-] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Skipping network deallocation for instance since networking was not requested. 
{{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 1904.551772] env[62740]: DEBUG nova.compute.claims [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1904.551950] env[62740]: DEBUG oslo_concurrency.lockutils [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1904.552308] env[62740]: DEBUG oslo_concurrency.lockutils [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1904.554137] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Expecting reply to msg 01a40c3d899e49d893405480c3bc45bf in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1904.563905] env[62740]: INFO nova.scheduler.client.report [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Deleted allocations for instance c0daf074-eecb-4899-938f-477031efc6d1 [ 1904.569944] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 2322a6eea197491e9b9ce4f4385555ad in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1904.588283] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2322a6eea197491e9b9ce4f4385555ad [ 1904.588813] env[62740]: DEBUG oslo_concurrency.lockutils [None req-1fd667ba-0e5c-48cd-b129-1e6068ba7839 tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Lock "c0daf074-eecb-4899-938f-477031efc6d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 632.878s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1904.589558] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 67d579b94c8e406b8393781c835464bb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1904.590520] env[62740]: DEBUG oslo_concurrency.lockutils [None req-47acb4d3-9ba0-4b0c-a9c7-8b4129eae74d tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Lock "c0daf074-eecb-4899-938f-477031efc6d1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 437.219s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1904.590814] env[62740]: DEBUG oslo_concurrency.lockutils [None req-47acb4d3-9ba0-4b0c-a9c7-8b4129eae74d tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Acquiring lock "c0daf074-eecb-4899-938f-477031efc6d1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1904.591145] env[62740]: DEBUG oslo_concurrency.lockutils [None req-47acb4d3-9ba0-4b0c-a9c7-8b4129eae74d tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Lock "c0daf074-eecb-4899-938f-477031efc6d1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1904.591390] env[62740]: DEBUG oslo_concurrency.lockutils [None req-47acb4d3-9ba0-4b0c-a9c7-8b4129eae74d tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Lock "c0daf074-eecb-4899-938f-477031efc6d1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1904.593247] env[62740]: INFO nova.compute.manager [None req-47acb4d3-9ba0-4b0c-a9c7-8b4129eae74d tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Terminating instance [ 1904.595193] env[62740]: DEBUG nova.compute.manager [None req-47acb4d3-9ba0-4b0c-a9c7-8b4129eae74d tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1904.595434] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-47acb4d3-9ba0-4b0c-a9c7-8b4129eae74d tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1904.596042] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 01a40c3d899e49d893405480c3bc45bf [ 1904.596513] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-da3014cf-c93e-4d2a-b30a-0739c9b97320 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.602106] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 67d579b94c8e406b8393781c835464bb [ 1904.602687] env[62740]: DEBUG nova.compute.manager [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Starting instance... 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1904.604282] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 1acceebadfbf4cddae82bd0adeb331b2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1904.611620] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1b2795b-39d9-437b-bcdf-531c8fb9715f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.640582] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1acceebadfbf4cddae82bd0adeb331b2 [ 1904.641098] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-47acb4d3-9ba0-4b0c-a9c7-8b4129eae74d tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c0daf074-eecb-4899-938f-477031efc6d1 could not be found. [ 1904.641304] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-47acb4d3-9ba0-4b0c-a9c7-8b4129eae74d tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1904.641483] env[62740]: INFO nova.compute.manager [None req-47acb4d3-9ba0-4b0c-a9c7-8b4129eae74d tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1904.641726] env[62740]: DEBUG oslo.service.loopingcall [None req-47acb4d3-9ba0-4b0c-a9c7-8b4129eae74d tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1904.647746] env[62740]: DEBUG nova.compute.manager [-] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1904.647851] env[62740]: DEBUG nova.network.neutron [-] [instance: c0daf074-eecb-4899-938f-477031efc6d1] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1904.660218] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1904.666860] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 0a10fed9ccbe417a829702caaa87d96a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1904.672861] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0a10fed9ccbe417a829702caaa87d96a [ 1904.673211] env[62740]: DEBUG nova.network.neutron [-] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1904.673566] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 296b1e2183754b10aa3da4d3b766f587 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1904.681899] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 296b1e2183754b10aa3da4d3b766f587 [ 1904.682461] env[62740]: INFO nova.compute.manager [-] [instance: c0daf074-eecb-4899-938f-477031efc6d1] Took 0.03 seconds to deallocate network for instance. 
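The "Acquiring lock" / "acquired" / "released ... held N s" DEBUG lines above all come from one place: oslo.concurrency's lockutils wraps the critical section and logs how long each caller waited for and held the named semaphore, which is why every such line points at lockutils.py:402/407/421 (the decorator's inner wrapper). A minimal sketch of that pattern, assuming only the public lockutils.synchronized decorator — the instance UUID is taken from the log, everything else is illustrative, not Nova's actual code:

    from oslo_concurrency import lockutils

    # Callers serialize on the process-local 'compute_resources' semaphore,
    # the same name ResourceTracker.instance_claim and abort_instance_claim
    # contend on in the records above.
    @lockutils.synchronized('compute_resources')
    def abort_instance_claim(instance_uuid):
        print(f"aborting claim for {instance_uuid}")

    abort_instance_claim('61fea037-aac3-47ef-aa6a-5dfa657d840d')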
[ 1904.685945] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-47acb4d3-9ba0-4b0c-a9c7-8b4129eae74d tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg b8515f54876f4474ad9e06c73c9af3c4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1904.711786] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b8515f54876f4474ad9e06c73c9af3c4 [ 1904.724526] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-47acb4d3-9ba0-4b0c-a9c7-8b4129eae74d tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg 5e99c2a1250a451689b85f07d2e2cc1e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1904.761407] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e99c2a1250a451689b85f07d2e2cc1e [ 1904.765426] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52f5bac9-4ee9-473d-a6d5-8dde17fba8bb {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.768506] env[62740]: DEBUG oslo_concurrency.lockutils [None req-47acb4d3-9ba0-4b0c-a9c7-8b4129eae74d tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Lock "c0daf074-eecb-4899-938f-477031efc6d1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.178s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1904.768919] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-47acb4d3-9ba0-4b0c-a9c7-8b4129eae74d tempest-AttachInterfacesTestJSON-805621508 tempest-AttachInterfacesTestJSON-805621508-project-member] Expecting reply to msg ff97ed454def4846b64c4c73833463d6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1904.769875] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "c0daf074-eecb-4899-938f-477031efc6d1" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 95.523s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1904.770076] env[62740]: INFO nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: c0daf074-eecb-4899-938f-477031efc6d1] During sync_power_state the instance has a pending task (deleting). Skip. 
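The _sync_power_states lines just above show the periodic power-state audit declining to act on c0daf074 because a delete is already in flight. A simplified sketch of that guard (not Nova's implementation; the dict-based instance is a stand-in):

    # If the instance still has a pending task, the periodic sync skips it
    # rather than racing the in-flight operation (here: deleting).
    def query_driver_power_state_and_sync(instance):
        if instance.get('task_state') is not None:
            print('During sync_power_state the instance has a pending task '
                  f"({instance['task_state']}). Skip.")
            return
        # ...otherwise compare the hypervisor power state with the DB record.

    query_driver_power_state_and_sync({'task_state': 'deleting'})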
[ 1904.770253] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "c0daf074-eecb-4899-938f-477031efc6d1" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1904.775462] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f338800-8053-4ac2-ab31-1da556ecac4f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.778782] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ff97ed454def4846b64c4c73833463d6 [ 1904.807008] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5f9bca7-51ce-4f25-bb4e-7357402ca4d6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.813628] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d941f01f-fc33-4c33-a297-454c7c007af3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.826365] env[62740]: DEBUG nova.compute.provider_tree [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1904.826895] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Expecting reply to msg dc9c523a949d4452a5be63e1131a0dcf in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1904.835467] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dc9c523a949d4452a5be63e1131a0dcf [ 1904.836413] env[62740]: DEBUG nova.scheduler.client.report [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1904.838657] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Expecting reply to msg c9fcb4efc847493984630fcbbd2bdd4c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1904.849550] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c9fcb4efc847493984630fcbbd2bdd4c [ 1904.850219] env[62740]: DEBUG oslo_concurrency.lockutils [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.298s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1904.850917] env[62740]: ERROR nova.compute.manager [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image 174f7655-3fb8-458a-8e9c-108936afe738. [ 1904.850917] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Traceback (most recent call last): [ 1904.850917] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1904.850917] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1904.850917] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1904.850917] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] result = getattr(controller, method)(*args, **kwargs) [ 1904.850917] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1904.850917] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] return self._get(image_id) [ 1904.850917] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1904.850917] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1904.850917] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1904.850917] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] resp, body = self.http_client.get(url, headers=header) [ 1904.850917] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1904.850917] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] return self.request(url, 'GET', **kwargs) [ 1904.850917] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1904.850917] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] return self._handle_response(resp) [ 1904.850917] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1904.850917] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] raise exc.from_response(resp, resp.content) [ 1904.850917] env[62740]: ERROR 
nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1904.850917] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] [ 1904.850917] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] During handling of the above exception, another exception occurred: [ 1904.850917] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] [ 1904.850917] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Traceback (most recent call last): [ 1904.850917] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1904.850917] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] self.driver.spawn(context, instance, image_meta, [ 1904.850917] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1904.850917] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1904.850917] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1904.850917] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] self._fetch_image_if_missing(context, vi) [ 1904.850917] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1904.850917] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] image_fetch(context, vi, tmp_image_ds_loc) [ 1904.850917] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1904.850917] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] images.fetch_image( [ 1904.850917] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1904.850917] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] metadata = IMAGE_API.get(context, image_ref) [ 1904.850917] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1904.850917] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] return session.show(context, image_id, [ 1904.851868] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1904.851868] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] _reraise_translated_image_exception(image_id) [ 1904.851868] env[62740]: ERROR nova.compute.manager [instance: 
61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1904.851868] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] raise new_exc.with_traceback(exc_trace) [ 1904.851868] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1904.851868] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1904.851868] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1904.851868] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] result = getattr(controller, method)(*args, **kwargs) [ 1904.851868] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1904.851868] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] return self._get(image_id) [ 1904.851868] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1904.851868] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1904.851868] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1904.851868] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] resp, body = self.http_client.get(url, headers=header) [ 1904.851868] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1904.851868] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] return self.request(url, 'GET', **kwargs) [ 1904.851868] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1904.851868] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] return self._handle_response(resp) [ 1904.851868] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1904.851868] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] raise exc.from_response(resp, resp.content) [ 1904.851868] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] nova.exception.ImageNotAuthorized: Not authorized for image 174f7655-3fb8-458a-8e9c-108936afe738. 
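The traceback above ends in the translation step that turns glanceclient's HTTPUnauthorized (HTTP 401) into Nova's ImageNotAuthorized: glance.py catches the client error and re-raises a Nova-level exception onto the original traceback via new_exc.with_traceback(exc_trace), which is why both exceptions appear chained in the log. A self-contained sketch of that idiom (the exception class and helper names below are illustrative stand-ins):

    import sys

    class ImageNotAuthorized(Exception):
        pass

    def _reraise_translated(image_id):
        # Keep the original traceback so the 401 call chain stays visible,
        # exactly as in the nested traceback logged above.
        _, _, exc_trace = sys.exc_info()
        raise ImageNotAuthorized(
            f'Not authorized for image {image_id}.').with_traceback(exc_trace)

    def show(image_id):
        try:
            raise PermissionError('HTTP 401 Unauthorized')  # glanceclient stand-in
        except Exception:
            _reraise_translated(image_id)

    try:
        show('174f7655-3fb8-458a-8e9c-108936afe738')
    except ImageNotAuthorized as exc:
        print(exc)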
[ 1904.851868] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] [ 1904.851868] env[62740]: DEBUG nova.compute.utils [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Not authorized for image 174f7655-3fb8-458a-8e9c-108936afe738. {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1904.852686] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.193s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1904.854141] env[62740]: INFO nova.compute.claims [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1904.855636] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 82ac15933f3f4a12ba93c9d07a88d64f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1904.857472] env[62740]: DEBUG nova.compute.manager [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Build of instance 61fea037-aac3-47ef-aa6a-5dfa657d840d was re-scheduled: Not authorized for image 174f7655-3fb8-458a-8e9c-108936afe738. 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1904.857947] env[62740]: DEBUG nova.compute.manager [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1904.858188] env[62740]: DEBUG oslo_concurrency.lockutils [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Acquiring lock "refresh_cache-61fea037-aac3-47ef-aa6a-5dfa657d840d" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1904.858339] env[62740]: DEBUG oslo_concurrency.lockutils [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Acquired lock "refresh_cache-61fea037-aac3-47ef-aa6a-5dfa657d840d" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1904.858503] env[62740]: DEBUG nova.network.neutron [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1904.858920] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Expecting reply to msg 32079418f72e41d7a2f7ca3050bd1b86 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1904.864575] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 32079418f72e41d7a2f7ca3050bd1b86 [ 1904.883458] env[62740]: DEBUG nova.network.neutron [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1904.888432] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 82ac15933f3f4a12ba93c9d07a88d64f [ 1904.890195] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 530e71b8110b4f09a2ef6839fcd8da39 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1904.899363] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 530e71b8110b4f09a2ef6839fcd8da39 [ 1904.964431] env[62740]: DEBUG nova.network.neutron [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1904.964943] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Expecting reply to msg 6fd35b1f50504527a70f4b5501fc0e94 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1904.972748] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6fd35b1f50504527a70f4b5501fc0e94 [ 1904.973273] env[62740]: DEBUG oslo_concurrency.lockutils [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Releasing lock "refresh_cache-61fea037-aac3-47ef-aa6a-5dfa657d840d" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1904.973485] env[62740]: DEBUG nova.compute.manager [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1904.973672] env[62740]: DEBUG nova.compute.manager [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Skipping network deallocation for instance since networking was not requested. 
{{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 1904.975256] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Expecting reply to msg 0bced65b1ff9496da193a9a21276e1cc in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1905.006985] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0bced65b1ff9496da193a9a21276e1cc [ 1905.009752] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Expecting reply to msg 83ab97680413461f927df330e121db9e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1905.044060] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 83ab97680413461f927df330e121db9e [ 1905.052086] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-323466cd-a40b-4fb9-af52-435d12293b6d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.059846] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8c03242-a932-4cdc-b00f-e77cd3027f67 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.064046] env[62740]: INFO nova.scheduler.client.report [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Deleted allocations for instance 61fea037-aac3-47ef-aa6a-5dfa657d840d [ 1905.070397] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Expecting reply to msg 2a574a6c5b224fa3894a8eceea107fbd in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1905.097616] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a574a6c5b224fa3894a8eceea107fbd [ 1905.098502] env[62740]: DEBUG oslo_concurrency.lockutils [None req-048fcd73-c0e9-431c-a08b-94f6b3073531 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Lock "61fea037-aac3-47ef-aa6a-5dfa657d840d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 607.472s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1905.099238] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59ac549c-de8e-4d81-962c-2750f2451374 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.102240] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg bec2bbe02d0c4f4f98251717b0711987 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1905.103151] env[62740]: DEBUG oslo_concurrency.lockutils [None req-18bf448e-10cb-41b9-94e6-cda5e9de85c7 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Lock "61fea037-aac3-47ef-aa6a-5dfa657d840d" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 411.379s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1905.103431] env[62740]: DEBUG oslo_concurrency.lockutils [None req-18bf448e-10cb-41b9-94e6-cda5e9de85c7 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Acquiring lock "61fea037-aac3-47ef-aa6a-5dfa657d840d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1905.103678] env[62740]: DEBUG oslo_concurrency.lockutils [None req-18bf448e-10cb-41b9-94e6-cda5e9de85c7 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Lock "61fea037-aac3-47ef-aa6a-5dfa657d840d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1905.103844] env[62740]: DEBUG oslo_concurrency.lockutils [None req-18bf448e-10cb-41b9-94e6-cda5e9de85c7 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Lock "61fea037-aac3-47ef-aa6a-5dfa657d840d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1905.106474] env[62740]: INFO nova.compute.manager [None req-18bf448e-10cb-41b9-94e6-cda5e9de85c7 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Terminating instance [ 1905.108039] env[62740]: DEBUG oslo_concurrency.lockutils [None req-18bf448e-10cb-41b9-94e6-cda5e9de85c7 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Acquiring lock "refresh_cache-61fea037-aac3-47ef-aa6a-5dfa657d840d" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1905.108207] env[62740]: DEBUG oslo_concurrency.lockutils [None req-18bf448e-10cb-41b9-94e6-cda5e9de85c7 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Acquired lock "refresh_cache-61fea037-aac3-47ef-aa6a-5dfa657d840d" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1905.108375] env[62740]: DEBUG nova.network.neutron [None req-18bf448e-10cb-41b9-94e6-cda5e9de85c7 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1905.108828] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-18bf448e-10cb-41b9-94e6-cda5e9de85c7 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Expecting reply to msg 192b129db38a4f3c8f0818b86e0870cd in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1905.112273] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bec2bbe02d0c4f4f98251717b0711987 [ 1905.113562] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93bd3c23-bb7a-435c-a902-d638ae88b0e5 
{{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.117352] env[62740]: DEBUG nova.compute.manager [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1905.118942] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg eee25f39078f4675adc716f7ef899c5d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1905.120065] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 192b129db38a4f3c8f0818b86e0870cd [ 1905.132898] env[62740]: DEBUG nova.compute.provider_tree [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1905.133405] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 50e95fa9fdd24fddbd14d9d75c03e113 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1905.139136] env[62740]: DEBUG nova.network.neutron [None req-18bf448e-10cb-41b9-94e6-cda5e9de85c7 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1905.141119] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 50e95fa9fdd24fddbd14d9d75c03e113 [ 1905.142202] env[62740]: DEBUG nova.scheduler.client.report [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1905.144443] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 70497f4aa816434db5a9bc6de126d9ef in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1905.146883] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eee25f39078f4675adc716f7ef899c5d [ 1905.155511] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 70497f4aa816434db5a9bc6de126d9ef [ 1905.156178] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.303s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1905.156636] env[62740]: DEBUG nova.compute.manager [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Start building networks asynchronously for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1905.158191] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 0a7529b7a9d040cda0606e40516368bf in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1905.161525] env[62740]: DEBUG oslo_concurrency.lockutils [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1905.161793] env[62740]: DEBUG oslo_concurrency.lockutils [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1905.163119] env[62740]: INFO nova.compute.claims [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1905.164962] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg e593dfd666d141d4b99b7e1c83490684 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1905.189453] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0a7529b7a9d040cda0606e40516368bf [ 1905.190705] env[62740]: DEBUG nova.compute.utils [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1905.191375] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 18f4754601ab497481d4ccca91f6b31e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1905.192190] env[62740]: DEBUG nova.compute.manager [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Allocating IP information in the background. 
{{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1905.192429] env[62740]: DEBUG nova.network.neutron [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1905.194557] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e593dfd666d141d4b99b7e1c83490684 [ 1905.196171] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg cf694b0546274b59a454327833a743f1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1905.198844] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 18f4754601ab497481d4ccca91f6b31e [ 1905.199352] env[62740]: DEBUG nova.compute.manager [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Start building block device mappings for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1905.200899] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg a9c5a5a8b16e40ad99eb3936b10d68c9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1905.202418] env[62740]: DEBUG nova.network.neutron [None req-18bf448e-10cb-41b9-94e6-cda5e9de85c7 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1905.202741] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-18bf448e-10cb-41b9-94e6-cda5e9de85c7 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Expecting reply to msg c0c7f46cb92d4802aff93d410ee6d3de in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1905.203722] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cf694b0546274b59a454327833a743f1 [ 1905.209411] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c0c7f46cb92d4802aff93d410ee6d3de [ 1905.209971] env[62740]: DEBUG oslo_concurrency.lockutils [None req-18bf448e-10cb-41b9-94e6-cda5e9de85c7 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Releasing lock "refresh_cache-61fea037-aac3-47ef-aa6a-5dfa657d840d" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1905.210350] env[62740]: DEBUG nova.compute.manager [None req-18bf448e-10cb-41b9-94e6-cda5e9de85c7 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Start destroying the instance on the hypervisor. 
{{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1905.210539] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-18bf448e-10cb-41b9-94e6-cda5e9de85c7 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1905.211051] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b2647ed6-3a6d-41c7-8ba9-4e4f81f99b33 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.221008] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cd0cce2-cf1e-484c-9a20-59fe56d91925 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.233748] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a9c5a5a8b16e40ad99eb3936b10d68c9 [ 1905.236381] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 3199a1495fbb459faf6a08222d762f15 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1905.254030] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-18bf448e-10cb-41b9-94e6-cda5e9de85c7 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 61fea037-aac3-47ef-aa6a-5dfa657d840d could not be found. [ 1905.254193] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-18bf448e-10cb-41b9-94e6-cda5e9de85c7 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1905.254380] env[62740]: INFO nova.compute.manager [None req-18bf448e-10cb-41b9-94e6-cda5e9de85c7 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1905.254628] env[62740]: DEBUG oslo.service.loopingcall [None req-18bf448e-10cb-41b9-94e6-cda5e9de85c7 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1905.257240] env[62740]: DEBUG nova.compute.manager [-] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1905.257395] env[62740]: DEBUG nova.network.neutron [-] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1905.260160] env[62740]: DEBUG nova.policy [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fa549a18cbf84678844e14ddd094d70e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '065d149aea7645d7a5e32c0d14ff0936', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 1905.269037] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3199a1495fbb459faf6a08222d762f15 [ 1905.269600] env[62740]: DEBUG nova.compute.manager [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Start spawning the instance on the hypervisor. {{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1905.361908] env[62740]: DEBUG nova.virt.hardware [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1905.362054] env[62740]: DEBUG nova.virt.hardware [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1905.362229] env[62740]: DEBUG nova.virt.hardware [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1905.362419] env[62740]: DEBUG nova.virt.hardware [None 
req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1905.362569] env[62740]: DEBUG nova.virt.hardware [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1905.362720] env[62740]: DEBUG nova.virt.hardware [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1905.362964] env[62740]: DEBUG nova.virt.hardware [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1905.363116] env[62740]: DEBUG nova.virt.hardware [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1905.363270] env[62740]: DEBUG nova.virt.hardware [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1905.363468] env[62740]: DEBUG nova.virt.hardware [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1905.363607] env[62740]: DEBUG nova.virt.hardware [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1905.364484] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b2306f2-7373-4913-ba72-4853abb3c1a8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.374927] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5159c47-4fdb-43ed-80e0-1d2103c0ac26 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.394020] env[62740]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=62740) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1905.394020] 
env[62740]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-e4bfd439-cccc-49d9-8354-e3608e3b9620'] [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1905.394020] 
env[62740]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1905.394020] env[62740]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1905.395354] env[62740]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1905.395354] env[62740]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1905.395354] env[62740]: ERROR oslo.service.loopingcall [ 1905.395354] env[62740]: ERROR nova.compute.manager [None req-18bf448e-10cb-41b9-94e6-cda5e9de85c7 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
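The 401 above comes from Keystone rejecting the service credentials Nova uses for its admin calls to Neutron, and the error text points directly at the [neutron] section of nova.conf. As a rough illustration of what that section normally contains (every endpoint name and value below is a placeholder, not taken from this deployment), a keystoneauth password-auth block might look like:

```ini
# Illustrative [neutron] block for nova.conf; all values are placeholders.
# A 401 like the one above usually means one of these credentials no
# longer matches what Keystone expects for the service user.
[neutron]
auth_type = password
auth_url = http://controller:5000/v3
username = nova
password = SERVICE_PASSWORD
project_name = service
user_domain_name = Default
project_domain_name = Default
region_name = RegionOne
```

Verifying these values against the Keystone service user (and restarting nova-compute afterwards) is the usual first step when this exception appears.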
[ 1905.395354] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-18bf448e-10cb-41b9-94e6-cda5e9de85c7 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Expecting reply to msg 9ad8b785f4cb4243a23aea1575fcc42e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1905.410236] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37de3cf7-4880-4121-b013-9eea2ea339b3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.419021] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bec33b0-64ea-4eed-bca6-50f6329bca4b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.448241] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ad8b785f4cb4243a23aea1575fcc42e [ 1905.449200] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb4d9e91-bef9-4b64-ad8e-65ab7632d4d7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.453768] env[62740]: ERROR nova.compute.manager [None req-18bf448e-10cb-41b9-94e6-cda5e9de85c7 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1905.453768] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Traceback (most recent call last): [ 1905.453768] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1905.453768] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] ret = obj(*args, **kwargs) [ 1905.453768] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1905.453768] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] exception_handler_v20(status_code, error_body) [ 1905.453768] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1905.453768] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] raise client_exc(message=error_message, [ 1905.453768] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1905.453768] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Neutron server returns request_ids: ['req-e4bfd439-cccc-49d9-8354-e3608e3b9620'] [ 1905.453768] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] [ 1905.453768] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] During handling of the above exception, another 
exception occurred: [ 1905.453768] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] [ 1905.453768] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Traceback (most recent call last): [ 1905.453768] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1905.453768] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] self._delete_instance(context, instance, bdms) [ 1905.453768] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1905.453768] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] self._shutdown_instance(context, instance, bdms) [ 1905.453768] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1905.453768] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] self._try_deallocate_network(context, instance, requested_networks) [ 1905.453768] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1905.453768] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] with excutils.save_and_reraise_exception(): [ 1905.453768] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1905.453768] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] self.force_reraise() [ 1905.453768] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1905.453768] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] raise self.value [ 1905.453768] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1905.453768] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] _deallocate_network_with_retries() [ 1905.453768] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1905.453768] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] return evt.wait() [ 1905.453768] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1905.453768] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] result = hub.switch() [ 1905.453768] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1905.453768] env[62740]: ERROR nova.compute.manager [instance: 
61fea037-aac3-47ef-aa6a-5dfa657d840d] return self.greenlet.switch() [ 1905.453768] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1905.453768] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] result = func(*self.args, **self.kw) [ 1905.453768] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1905.454901] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] result = f(*args, **kwargs) [ 1905.454901] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1905.454901] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] self._deallocate_network( [ 1905.454901] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1905.454901] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] self.network_api.deallocate_for_instance( [ 1905.454901] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1905.454901] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] data = neutron.list_ports(**search_opts) [ 1905.454901] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1905.454901] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] ret = obj(*args, **kwargs) [ 1905.454901] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1905.454901] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] return self.list('ports', self.ports_path, retrieve_all, [ 1905.454901] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1905.454901] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] ret = obj(*args, **kwargs) [ 1905.454901] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1905.454901] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] for r in self._pagination(collection, path, **params): [ 1905.454901] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1905.454901] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] res = self.get(path, params=params) [ 1905.454901] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File 
"/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1905.454901] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] ret = obj(*args, **kwargs) [ 1905.454901] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1905.454901] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] return self.retry_request("GET", action, body=body, [ 1905.454901] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1905.454901] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] ret = obj(*args, **kwargs) [ 1905.454901] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1905.454901] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] return self.do_request(method, action, body=body, [ 1905.454901] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1905.454901] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] ret = obj(*args, **kwargs) [ 1905.454901] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1905.454901] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] self._handle_fault_response(status_code, replybody, resp) [ 1905.454901] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1905.454901] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1905.454901] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1905.454901] env[62740]: ERROR nova.compute.manager [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] [ 1905.455961] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-18bf448e-10cb-41b9-94e6-cda5e9de85c7 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Expecting reply to msg 127686ae90134710be4bb6c3754177c1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1905.462532] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa04740f-77aa-4379-99f3-2b999f538b6c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.479643] env[62740]: DEBUG nova.compute.provider_tree [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1905.480175] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg 1d16eb161c8f474189df696bcc4447a3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1905.481327] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 127686ae90134710be4bb6c3754177c1 [ 1905.482388] env[62740]: DEBUG oslo_concurrency.lockutils [None req-18bf448e-10cb-41b9-94e6-cda5e9de85c7 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Lock "61fea037-aac3-47ef-aa6a-5dfa657d840d" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.379s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1905.482859] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-18bf448e-10cb-41b9-94e6-cda5e9de85c7 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Expecting reply to msg 03ffd337fdb44daa8831034168caa599 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1905.483814] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "61fea037-aac3-47ef-aa6a-5dfa657d840d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 96.236s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1905.484070] env[62740]: INFO nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] During sync_power_state the instance has a pending task (deleting). Skip. 
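The reason the Unauthorized error surfaces as a failed "Dynamic interval looping call" is that network deallocation is wrapped in oslo.service's RetryDecorator, which re-runs the wrapped function through a dynamic looping call and only retries the exception types it was configured with; anything else, like the credential error here, escapes on the first attempt. A minimal sketch of that pattern, with an illustrative function and exception rather than Nova's actual _deallocate_network_with_retries:

```python
# Sketch of the oslo.service retry pattern behind the traceback above.
# `flaky_deallocate` and `TransientError` are illustrative stand-ins.
from oslo_service import loopingcall


class TransientError(Exception):
    pass


@loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=1,
                            max_sleep_time=5,
                            exceptions=(TransientError,))
def flaky_deallocate():
    # Only TransientError is retried; an exception outside `exceptions`
    # (like NeutronAdminCredentialConfigurationInvalid above) propagates
    # immediately and the looping call logs it as failed.
    raise TransientError("simulated transient failure")


try:
    flaky_deallocate()
except TransientError:
    print("retries exhausted")
```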
[ 1905.484439] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "61fea037-aac3-47ef-aa6a-5dfa657d840d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1905.487450] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1d16eb161c8f474189df696bcc4447a3 [ 1905.488258] env[62740]: DEBUG nova.scheduler.client.report [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1905.491873] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg 6830317f311248ffa8d3585ded481523 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1905.496612] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 03ffd337fdb44daa8831034168caa599 [ 1905.497549] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-18bf448e-10cb-41b9-94e6-cda5e9de85c7 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Expecting reply to msg be90e36a8e7f4119a78e73e1714f0889 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1905.504090] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6830317f311248ffa8d3585ded481523 [ 1905.504786] env[62740]: DEBUG oslo_concurrency.lockutils [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.343s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1905.505274] env[62740]: DEBUG nova.compute.manager [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Start building networks asynchronously for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1905.506990] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg 1541322ba38b4b2692e598458f0edc35 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1905.521920] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be90e36a8e7f4119a78e73e1714f0889 [ 1905.524294] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-18bf448e-10cb-41b9-94e6-cda5e9de85c7 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Expecting reply to msg bf41b30072c64356b3ac7f76b0c6bc75 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1905.544287] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1541322ba38b4b2692e598458f0edc35 [ 1905.545462] env[62740]: DEBUG nova.compute.utils [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1905.546078] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg 0b9cde2194fd494eb1d203a8b6087f3c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1905.547323] env[62740]: DEBUG nova.compute.manager [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1905.547323] env[62740]: DEBUG nova.network.neutron [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1905.555789] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bf41b30072c64356b3ac7f76b0c6bc75 [ 1905.556728] env[62740]: INFO nova.compute.manager [None req-18bf448e-10cb-41b9-94e6-cda5e9de85c7 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] [instance: 61fea037-aac3-47ef-aa6a-5dfa657d840d] Successfully reverted task state from None on failure for instance. [ 1905.559458] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0b9cde2194fd494eb1d203a8b6087f3c [ 1905.560022] env[62740]: DEBUG nova.compute.manager [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1905.565016] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg a014bb3da3124736b5b1923061f516c5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server [None req-18bf448e-10cb-41b9-94e6-cda5e9de85c7 tempest-ServerShowV257Test-323168147 tempest-ServerShowV257Test-323168147-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-e4bfd439-cccc-49d9-8354-e3608e3b9620'] [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 
1905.565016] env[62740]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server raise self.value [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server raise self.value [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server raise self.value [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in terminate_instance [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1905.565016] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3339, in do_terminate_instance [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server raise self.value [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server raise self.value [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 
1905.566525] env[62740]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1905.566525] env[62740]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1905.567941] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1905.567941] env[62740]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1905.567941] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1905.567941] env[62740]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1905.567941] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1905.567941] env[62740]: ERROR oslo_messaging.rpc.server raise 
exception.NeutronAdminCredentialConfigurationInvalid() [ 1905.567941] env[62740]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1905.567941] env[62740]: ERROR oslo_messaging.rpc.server [ 1905.593378] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a014bb3da3124736b5b1923061f516c5 [ 1905.599252] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg 344099994c324eefa462489a8a3e0ec3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1905.617291] env[62740]: DEBUG nova.policy [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '92b7913efc094fd090cd51f76f3eaf4b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1362f27348894a139cf80a8ea6449984', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 1905.625151] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 344099994c324eefa462489a8a3e0ec3 [ 1905.626256] env[62740]: DEBUG nova.compute.manager [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Start spawning the instance on the hypervisor. 
{{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1905.649890] env[62740]: DEBUG nova.virt.hardware [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1905.650146] env[62740]: DEBUG nova.virt.hardware [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1905.650307] env[62740]: DEBUG nova.virt.hardware [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1905.650496] env[62740]: DEBUG nova.virt.hardware [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1905.650643] env[62740]: DEBUG nova.virt.hardware [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1905.650791] env[62740]: DEBUG nova.virt.hardware [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1905.651030] env[62740]: DEBUG nova.virt.hardware [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1905.651207] env[62740]: DEBUG nova.virt.hardware [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1905.651378] env[62740]: DEBUG nova.virt.hardware [None 
req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1905.651542] env[62740]: DEBUG nova.virt.hardware [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1905.651713] env[62740]: DEBUG nova.virt.hardware [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1905.652567] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3c79d48-cd0b-4fe4-94d2-c63c8dd8d6a2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.660767] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-449ba7f2-ca44-4d23-a128-c960da949f00 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.718981] env[62740]: DEBUG nova.network.neutron [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Successfully created port: 1b1767fc-2a18-4d21-839d-660e0bf3e49a {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1906.136271] env[62740]: DEBUG nova.network.neutron [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Successfully created port: 591c713b-055c-44c1-b0d0-a5fdd9b941e6 {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1906.403831] env[62740]: DEBUG nova.compute.manager [req-8bcd769b-263c-49eb-918f-6810a77f657b req-19d8330d-fbf2-4066-ab1d-ff94a574ba9e service nova] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Received event network-vif-plugged-1b1767fc-2a18-4d21-839d-660e0bf3e49a {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1906.404079] env[62740]: DEBUG oslo_concurrency.lockutils [req-8bcd769b-263c-49eb-918f-6810a77f657b req-19d8330d-fbf2-4066-ab1d-ff94a574ba9e service nova] Acquiring lock "4ee71d81-7d8b-42f8-a27c-b4645169fa3e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1906.404294] env[62740]: DEBUG oslo_concurrency.lockutils [req-8bcd769b-263c-49eb-918f-6810a77f657b req-19d8330d-fbf2-4066-ab1d-ff94a574ba9e service nova] Lock "4ee71d81-7d8b-42f8-a27c-b4645169fa3e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1906.404460] env[62740]: DEBUG oslo_concurrency.lockutils [req-8bcd769b-263c-49eb-918f-6810a77f657b 
req-19d8330d-fbf2-4066-ab1d-ff94a574ba9e service nova] Lock "4ee71d81-7d8b-42f8-a27c-b4645169fa3e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1906.404626] env[62740]: DEBUG nova.compute.manager [req-8bcd769b-263c-49eb-918f-6810a77f657b req-19d8330d-fbf2-4066-ab1d-ff94a574ba9e service nova] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] No waiting events found dispatching network-vif-plugged-1b1767fc-2a18-4d21-839d-660e0bf3e49a {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1906.404782] env[62740]: WARNING nova.compute.manager [req-8bcd769b-263c-49eb-918f-6810a77f657b req-19d8330d-fbf2-4066-ab1d-ff94a574ba9e service nova] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Received unexpected event network-vif-plugged-1b1767fc-2a18-4d21-839d-660e0bf3e49a for instance with vm_state building and task_state spawning. [ 1906.415129] env[62740]: DEBUG nova.network.neutron [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Successfully updated port: 1b1767fc-2a18-4d21-839d-660e0bf3e49a {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1906.415579] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 858a91e101194f0fbb7cf1f29b40f607 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1906.428785] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 858a91e101194f0fbb7cf1f29b40f607 [ 1906.429474] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquiring lock "refresh_cache-4ee71d81-7d8b-42f8-a27c-b4645169fa3e" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1906.429578] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquired lock "refresh_cache-4ee71d81-7d8b-42f8-a27c-b4645169fa3e" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1906.429724] env[62740]: DEBUG nova.network.neutron [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1906.430115] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 3ac35f1a3b0d4353bf1eedfd7eb12627 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1906.437616] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ac35f1a3b0d4353bf1eedfd7eb12627 [ 1906.490960] env[62740]: DEBUG nova.network.neutron [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c 
tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1906.615807] env[62740]: DEBUG nova.network.neutron [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Successfully created port: 40538458-9321-4a6b-84fb-cd5d7205408d {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1906.982794] env[62740]: DEBUG nova.network.neutron [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Updating instance_info_cache with network_info: [{"id": "1b1767fc-2a18-4d21-839d-660e0bf3e49a", "address": "fa:16:3e:ef:8c:b1", "network": {"id": "a1bf429f-63e1-4b06-ba31-36e8e686268d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1763096855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "065d149aea7645d7a5e32c0d14ff0936", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b1767fc-2a", "ovs_interfaceid": "1b1767fc-2a18-4d21-839d-660e0bf3e49a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1906.983394] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg a41695394ead4d0b9881906689e0cfa1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1906.995615] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a41695394ead4d0b9881906689e0cfa1 [ 1906.996584] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Releasing lock "refresh_cache-4ee71d81-7d8b-42f8-a27c-b4645169fa3e" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1906.996877] env[62740]: DEBUG nova.compute.manager [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Instance network_info: |[{"id": "1b1767fc-2a18-4d21-839d-660e0bf3e49a", "address": "fa:16:3e:ef:8c:b1", "network": {"id": "a1bf429f-63e1-4b06-ba31-36e8e686268d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1763096855-network", 
"subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "065d149aea7645d7a5e32c0d14ff0936", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b1767fc-2a", "ovs_interfaceid": "1b1767fc-2a18-4d21-839d-660e0bf3e49a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1906.997621] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ef:8c:b1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b399c74-1411-408a-b4cd-84e268ae83fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1b1767fc-2a18-4d21-839d-660e0bf3e49a', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1907.006479] env[62740]: DEBUG oslo.service.loopingcall [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1907.007112] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1907.007433] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0983d7b4-0ab4-42a2-b89a-d1d817e088a3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.025980] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-22dd17ee-e645-48d6-81bf-2ad2c631be62 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg cfefaa7aa80741d193eaa90b60b4ba09 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1907.034084] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1907.034084] env[62740]: value = "task-640336" [ 1907.034084] env[62740]: _type = "Task" [ 1907.034084] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1907.037078] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cfefaa7aa80741d193eaa90b60b4ba09 [ 1907.037708] env[62740]: DEBUG oslo_concurrency.lockutils [None req-22dd17ee-e645-48d6-81bf-2ad2c631be62 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquiring lock "4ee71d81-7d8b-42f8-a27c-b4645169fa3e" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1907.042913] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640336, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.238667] env[62740]: DEBUG nova.compute.manager [req-c44c028c-608b-48f3-ad8a-c1d5b12b46f6 req-69327e1f-b2c9-4ebe-bdc2-1ebb2853a52b service nova] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Received event network-vif-plugged-591c713b-055c-44c1-b0d0-a5fdd9b941e6 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1907.238934] env[62740]: DEBUG oslo_concurrency.lockutils [req-c44c028c-608b-48f3-ad8a-c1d5b12b46f6 req-69327e1f-b2c9-4ebe-bdc2-1ebb2853a52b service nova] Acquiring lock "b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1907.239184] env[62740]: DEBUG oslo_concurrency.lockutils [req-c44c028c-608b-48f3-ad8a-c1d5b12b46f6 req-69327e1f-b2c9-4ebe-bdc2-1ebb2853a52b service nova] Lock "b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1907.239356] env[62740]: DEBUG oslo_concurrency.lockutils [req-c44c028c-608b-48f3-ad8a-c1d5b12b46f6 req-69327e1f-b2c9-4ebe-bdc2-1ebb2853a52b service nova] Lock "b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1907.239525] env[62740]: DEBUG nova.compute.manager [req-c44c028c-608b-48f3-ad8a-c1d5b12b46f6 req-69327e1f-b2c9-4ebe-bdc2-1ebb2853a52b service nova] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] No waiting events found dispatching network-vif-plugged-591c713b-055c-44c1-b0d0-a5fdd9b941e6 {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1907.239706] env[62740]: WARNING nova.compute.manager [req-c44c028c-608b-48f3-ad8a-c1d5b12b46f6 req-69327e1f-b2c9-4ebe-bdc2-1ebb2853a52b service nova] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Received unexpected event network-vif-plugged-591c713b-055c-44c1-b0d0-a5fdd9b941e6 for instance with vm_state building and task_state spawning.
[ 1907.322680] env[62740]: DEBUG nova.network.neutron [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Successfully updated port: 591c713b-055c-44c1-b0d0-a5fdd9b941e6 {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1907.323225] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg 4634ff47bfb44e9888957d0df79e4a07 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1907.332666] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4634ff47bfb44e9888957d0df79e4a07 [ 1907.545255] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640336, 'name': CreateVM_Task, 'duration_secs': 0.30013} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1907.545488] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1907.546161] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1907.546332] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1907.546670] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1907.546938] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-576a6b6d-04bb-4dd8-a427-67c4f29b3847 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.551312] env[62740]: DEBUG oslo_vmware.api [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Waiting for the task: (returnval){ [ 1907.551312] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52a52a80-bf76-62e1-0564-8322830cea98" [ 1907.551312] env[62740]: _type = "Task" [ 1907.551312] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1907.558582] env[62740]: DEBUG oslo_vmware.api [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52a52a80-bf76-62e1-0564-8322830cea98, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.018663] env[62740]: DEBUG nova.network.neutron [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Successfully updated port: 40538458-9321-4a6b-84fb-cd5d7205408d {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1908.019241] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg efeb10b1b40b49859dffc0a803fa00b3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1908.037107] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg efeb10b1b40b49859dffc0a803fa00b3 [ 1908.037759] env[62740]: DEBUG oslo_concurrency.lockutils [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Acquiring lock "refresh_cache-b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1908.037913] env[62740]: DEBUG oslo_concurrency.lockutils [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Acquired lock "refresh_cache-b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1908.038085] env[62740]: DEBUG nova.network.neutron [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1908.038468] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg ede580c208ae487b9eba68ecb1d8e896 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1908.050814] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ede580c208ae487b9eba68ecb1d8e896 [ 1908.062879] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1908.063144] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 
4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1908.063362] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1908.110524] env[62740]: DEBUG nova.network.neutron [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1908.429619] env[62740]: DEBUG nova.compute.manager [req-85243449-378f-4446-9ef9-77f8609bfd12 req-e8ea9296-0dbd-4e84-93aa-47d64d4dd2a3 service nova] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Received event network-changed-1b1767fc-2a18-4d21-839d-660e0bf3e49a {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1908.429872] env[62740]: DEBUG nova.compute.manager [req-85243449-378f-4446-9ef9-77f8609bfd12 req-e8ea9296-0dbd-4e84-93aa-47d64d4dd2a3 service nova] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Refreshing instance network info cache due to event network-changed-1b1767fc-2a18-4d21-839d-660e0bf3e49a. {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1908.430118] env[62740]: DEBUG oslo_concurrency.lockutils [req-85243449-378f-4446-9ef9-77f8609bfd12 req-e8ea9296-0dbd-4e84-93aa-47d64d4dd2a3 service nova] Acquiring lock "refresh_cache-4ee71d81-7d8b-42f8-a27c-b4645169fa3e" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1908.430271] env[62740]: DEBUG oslo_concurrency.lockutils [req-85243449-378f-4446-9ef9-77f8609bfd12 req-e8ea9296-0dbd-4e84-93aa-47d64d4dd2a3 service nova] Acquired lock "refresh_cache-4ee71d81-7d8b-42f8-a27c-b4645169fa3e" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1908.430436] env[62740]: DEBUG nova.network.neutron [req-85243449-378f-4446-9ef9-77f8609bfd12 req-e8ea9296-0dbd-4e84-93aa-47d64d4dd2a3 service nova] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Refreshing network info cache for port 1b1767fc-2a18-4d21-839d-660e0bf3e49a {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1908.431010] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-85243449-378f-4446-9ef9-77f8609bfd12 req-e8ea9296-0dbd-4e84-93aa-47d64d4dd2a3 service nova] Expecting reply to msg c18d52edeac2493689d500e76320f5d6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1908.438570] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c18d52edeac2493689d500e76320f5d6 [ 1908.493791] env[62740]: DEBUG nova.network.neutron [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Updating instance_info_cache with network_info: [{"id": "591c713b-055c-44c1-b0d0-a5fdd9b941e6", "address": "fa:16:3e:cd:d9:fc", "network": {"id": 
"f8b76651-624b-4cbd-be42-91391d9a4f7a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1565101591", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.216", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1362f27348894a139cf80a8ea6449984", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap591c713b-05", "ovs_interfaceid": "591c713b-055c-44c1-b0d0-a5fdd9b941e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "40538458-9321-4a6b-84fb-cd5d7205408d", "address": "fa:16:3e:b1:65:70", "network": {"id": "0f537093-00e6-4791-9381-b8deccdf2480", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1785573648", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.74", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "1362f27348894a139cf80a8ea6449984", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fdd0624-2edb-4733-8284-225815c07f73", "external-id": "nsx-vlan-transportzone-330", "segmentation_id": 330, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40538458-93", "ovs_interfaceid": "40538458-9321-4a6b-84fb-cd5d7205408d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1908.494375] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg f3dcb86d956a40ed96c335bb752432ea in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1908.508466] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f3dcb86d956a40ed96c335bb752432ea [ 1908.509108] env[62740]: DEBUG oslo_concurrency.lockutils [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Releasing lock "refresh_cache-b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1908.509428] env[62740]: DEBUG nova.compute.manager [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Instance network_info: |[{"id": "591c713b-055c-44c1-b0d0-a5fdd9b941e6", "address": "fa:16:3e:cd:d9:fc", "network": {"id": 
"f8b76651-624b-4cbd-be42-91391d9a4f7a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1565101591", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.216", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1362f27348894a139cf80a8ea6449984", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap591c713b-05", "ovs_interfaceid": "591c713b-055c-44c1-b0d0-a5fdd9b941e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "40538458-9321-4a6b-84fb-cd5d7205408d", "address": "fa:16:3e:b1:65:70", "network": {"id": "0f537093-00e6-4791-9381-b8deccdf2480", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1785573648", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.74", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "1362f27348894a139cf80a8ea6449984", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fdd0624-2edb-4733-8284-225815c07f73", "external-id": "nsx-vlan-transportzone-330", "segmentation_id": 330, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40538458-93", "ovs_interfaceid": "40538458-9321-4a6b-84fb-cd5d7205408d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1908.509869] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cd:d9:fc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78e1ebb0-0130-446b-bf73-a0e59bbb95cc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '591c713b-055c-44c1-b0d0-a5fdd9b941e6', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:b1:65:70', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5fdd0624-2edb-4733-8284-225815c07f73', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '40538458-9321-4a6b-84fb-cd5d7205408d', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1908.519370] env[62740]: DEBUG oslo.service.loopingcall [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1908.519883] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1908.520135] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-626b5092-3574-4e2e-b830-3572c3e7f279 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.545300] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1908.545300] env[62740]: value = "task-640337" [ 1908.545300] env[62740]: _type = "Task" [ 1908.545300] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1908.553486] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640337, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.735185] env[62740]: DEBUG nova.network.neutron [req-85243449-378f-4446-9ef9-77f8609bfd12 req-e8ea9296-0dbd-4e84-93aa-47d64d4dd2a3 service nova] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Updated VIF entry in instance network info cache for port 1b1767fc-2a18-4d21-839d-660e0bf3e49a. {{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1908.735608] env[62740]: DEBUG nova.network.neutron [req-85243449-378f-4446-9ef9-77f8609bfd12 req-e8ea9296-0dbd-4e84-93aa-47d64d4dd2a3 service nova] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Updating instance_info_cache with network_info: [{"id": "1b1767fc-2a18-4d21-839d-660e0bf3e49a", "address": "fa:16:3e:ef:8c:b1", "network": {"id": "a1bf429f-63e1-4b06-ba31-36e8e686268d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1763096855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "065d149aea7645d7a5e32c0d14ff0936", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b1767fc-2a", "ovs_interfaceid": "1b1767fc-2a18-4d21-839d-660e0bf3e49a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1908.736223] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-85243449-378f-4446-9ef9-77f8609bfd12 req-e8ea9296-0dbd-4e84-93aa-47d64d4dd2a3 service nova] Expecting reply to msg 8ee399db37fa4b0a931eb068ecf75318 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1908.745380] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8ee399db37fa4b0a931eb068ecf75318 [ 1908.745994] env[62740]: DEBUG oslo_concurrency.lockutils [req-85243449-378f-4446-9ef9-77f8609bfd12 
req-e8ea9296-0dbd-4e84-93aa-47d64d4dd2a3 service nova] Releasing lock "refresh_cache-4ee71d81-7d8b-42f8-a27c-b4645169fa3e" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1909.055099] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640337, 'name': CreateVM_Task, 'duration_secs': 0.329372} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1909.055437] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1909.056055] env[62740]: DEBUG oslo_concurrency.lockutils [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1909.056193] env[62740]: DEBUG oslo_concurrency.lockutils [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1909.056505] env[62740]: DEBUG oslo_concurrency.lockutils [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1909.056792] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19f61456-9131-44c8-9f59-2a4bb92207a9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.061255] env[62740]: DEBUG oslo_vmware.api [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Waiting for the task: (returnval){ [ 1909.061255] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5285b72c-3de6-100c-5a6e-cda489e7c7b6" [ 1909.061255] env[62740]: _type = "Task" [ 1909.061255] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1909.068582] env[62740]: DEBUG oslo_vmware.api [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5285b72c-3de6-100c-5a6e-cda489e7c7b6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1909.266766] env[62740]: DEBUG nova.compute.manager [req-9a44e086-1f29-48cf-b30a-8fdb8d7c45d3 req-3398a95b-a13f-4a93-84b6-0e63e1dda080 service nova] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Received event network-changed-591c713b-055c-44c1-b0d0-a5fdd9b941e6 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1909.267074] env[62740]: DEBUG nova.compute.manager [req-9a44e086-1f29-48cf-b30a-8fdb8d7c45d3 req-3398a95b-a13f-4a93-84b6-0e63e1dda080 service nova] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Refreshing instance network info cache due to event network-changed-591c713b-055c-44c1-b0d0-a5fdd9b941e6. {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1909.267283] env[62740]: DEBUG oslo_concurrency.lockutils [req-9a44e086-1f29-48cf-b30a-8fdb8d7c45d3 req-3398a95b-a13f-4a93-84b6-0e63e1dda080 service nova] Acquiring lock "refresh_cache-b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1909.267429] env[62740]: DEBUG oslo_concurrency.lockutils [req-9a44e086-1f29-48cf-b30a-8fdb8d7c45d3 req-3398a95b-a13f-4a93-84b6-0e63e1dda080 service nova] Acquired lock "refresh_cache-b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1909.267590] env[62740]: DEBUG nova.network.neutron [req-9a44e086-1f29-48cf-b30a-8fdb8d7c45d3 req-3398a95b-a13f-4a93-84b6-0e63e1dda080 service nova] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Refreshing network info cache for port 591c713b-055c-44c1-b0d0-a5fdd9b941e6 {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1909.268108] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-9a44e086-1f29-48cf-b30a-8fdb8d7c45d3 req-3398a95b-a13f-4a93-84b6-0e63e1dda080 service nova] Expecting reply to msg 6350d8467f6746d9a5e1fc3aa9087136 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1909.277610] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6350d8467f6746d9a5e1fc3aa9087136 [ 1909.572369] env[62740]: DEBUG oslo_concurrency.lockutils [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1909.572577] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1909.572760] env[62740]: DEBUG oslo_concurrency.lockutils [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1909.574816] env[62740]: DEBUG nova.network.neutron 
[req-9a44e086-1f29-48cf-b30a-8fdb8d7c45d3 req-3398a95b-a13f-4a93-84b6-0e63e1dda080 service nova] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Updated VIF entry in instance network info cache for port 591c713b-055c-44c1-b0d0-a5fdd9b941e6. {{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1909.575204] env[62740]: DEBUG nova.network.neutron [req-9a44e086-1f29-48cf-b30a-8fdb8d7c45d3 req-3398a95b-a13f-4a93-84b6-0e63e1dda080 service nova] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Updating instance_info_cache with network_info: [{"id": "591c713b-055c-44c1-b0d0-a5fdd9b941e6", "address": "fa:16:3e:cd:d9:fc", "network": {"id": "f8b76651-624b-4cbd-be42-91391d9a4f7a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1565101591", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.216", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1362f27348894a139cf80a8ea6449984", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap591c713b-05", "ovs_interfaceid": "591c713b-055c-44c1-b0d0-a5fdd9b941e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "40538458-9321-4a6b-84fb-cd5d7205408d", "address": "fa:16:3e:b1:65:70", "network": {"id": "0f537093-00e6-4791-9381-b8deccdf2480", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1785573648", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.74", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "1362f27348894a139cf80a8ea6449984", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fdd0624-2edb-4733-8284-225815c07f73", "external-id": "nsx-vlan-transportzone-330", "segmentation_id": 330, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40538458-93", "ovs_interfaceid": "40538458-9321-4a6b-84fb-cd5d7205408d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1909.575725] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-9a44e086-1f29-48cf-b30a-8fdb8d7c45d3 req-3398a95b-a13f-4a93-84b6-0e63e1dda080 service nova] Expecting reply to msg f01435c816b74b3eab3910c8bdd5fa83 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1909.584968] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f01435c816b74b3eab3910c8bdd5fa83 [ 1909.585595] env[62740]: DEBUG oslo_concurrency.lockutils [req-9a44e086-1f29-48cf-b30a-8fdb8d7c45d3 
req-3398a95b-a13f-4a93-84b6-0e63e1dda080 service nova] Releasing lock "refresh_cache-b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1909.585816] env[62740]: DEBUG nova.compute.manager [req-9a44e086-1f29-48cf-b30a-8fdb8d7c45d3 req-3398a95b-a13f-4a93-84b6-0e63e1dda080 service nova] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Received event network-vif-plugged-40538458-9321-4a6b-84fb-cd5d7205408d {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1909.586016] env[62740]: DEBUG oslo_concurrency.lockutils [req-9a44e086-1f29-48cf-b30a-8fdb8d7c45d3 req-3398a95b-a13f-4a93-84b6-0e63e1dda080 service nova] Acquiring lock "b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1909.586217] env[62740]: DEBUG oslo_concurrency.lockutils [req-9a44e086-1f29-48cf-b30a-8fdb8d7c45d3 req-3398a95b-a13f-4a93-84b6-0e63e1dda080 service nova] Lock "b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1909.586379] env[62740]: DEBUG oslo_concurrency.lockutils [req-9a44e086-1f29-48cf-b30a-8fdb8d7c45d3 req-3398a95b-a13f-4a93-84b6-0e63e1dda080 service nova] Lock "b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1909.586546] env[62740]: DEBUG nova.compute.manager [req-9a44e086-1f29-48cf-b30a-8fdb8d7c45d3 req-3398a95b-a13f-4a93-84b6-0e63e1dda080 service nova] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] No waiting events found dispatching network-vif-plugged-40538458-9321-4a6b-84fb-cd5d7205408d {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1909.586712] env[62740]: WARNING nova.compute.manager [req-9a44e086-1f29-48cf-b30a-8fdb8d7c45d3 req-3398a95b-a13f-4a93-84b6-0e63e1dda080 service nova] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Received unexpected event network-vif-plugged-40538458-9321-4a6b-84fb-cd5d7205408d for instance with vm_state building and task_state spawning. [ 1909.586876] env[62740]: DEBUG nova.compute.manager [req-9a44e086-1f29-48cf-b30a-8fdb8d7c45d3 req-3398a95b-a13f-4a93-84b6-0e63e1dda080 service nova] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Received event network-changed-40538458-9321-4a6b-84fb-cd5d7205408d {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1909.587041] env[62740]: DEBUG nova.compute.manager [req-9a44e086-1f29-48cf-b30a-8fdb8d7c45d3 req-3398a95b-a13f-4a93-84b6-0e63e1dda080 service nova] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Refreshing instance network info cache due to event network-changed-40538458-9321-4a6b-84fb-cd5d7205408d.
{{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1909.587226] env[62740]: DEBUG oslo_concurrency.lockutils [req-9a44e086-1f29-48cf-b30a-8fdb8d7c45d3 req-3398a95b-a13f-4a93-84b6-0e63e1dda080 service nova] Acquiring lock "refresh_cache-b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1909.587363] env[62740]: DEBUG oslo_concurrency.lockutils [req-9a44e086-1f29-48cf-b30a-8fdb8d7c45d3 req-3398a95b-a13f-4a93-84b6-0e63e1dda080 service nova] Acquired lock "refresh_cache-b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1909.587596] env[62740]: DEBUG nova.network.neutron [req-9a44e086-1f29-48cf-b30a-8fdb8d7c45d3 req-3398a95b-a13f-4a93-84b6-0e63e1dda080 service nova] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Refreshing network info cache for port 40538458-9321-4a6b-84fb-cd5d7205408d {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1909.588226] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-9a44e086-1f29-48cf-b30a-8fdb8d7c45d3 req-3398a95b-a13f-4a93-84b6-0e63e1dda080 service nova] Expecting reply to msg 0c3227efdfe2468ba4ab03badd8953e1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1909.595890] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c3227efdfe2468ba4ab03badd8953e1 [ 1909.891591] env[62740]: DEBUG nova.network.neutron [req-9a44e086-1f29-48cf-b30a-8fdb8d7c45d3 req-3398a95b-a13f-4a93-84b6-0e63e1dda080 service nova] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Updated VIF entry in instance network info cache for port 40538458-9321-4a6b-84fb-cd5d7205408d. 
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1909.892047] env[62740]: DEBUG nova.network.neutron [req-9a44e086-1f29-48cf-b30a-8fdb8d7c45d3 req-3398a95b-a13f-4a93-84b6-0e63e1dda080 service nova] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Updating instance_info_cache with network_info: [{"id": "591c713b-055c-44c1-b0d0-a5fdd9b941e6", "address": "fa:16:3e:cd:d9:fc", "network": {"id": "f8b76651-624b-4cbd-be42-91391d9a4f7a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1565101591", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.216", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1362f27348894a139cf80a8ea6449984", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap591c713b-05", "ovs_interfaceid": "591c713b-055c-44c1-b0d0-a5fdd9b941e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "40538458-9321-4a6b-84fb-cd5d7205408d", "address": "fa:16:3e:b1:65:70", "network": {"id": "0f537093-00e6-4791-9381-b8deccdf2480", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1785573648", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.74", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "1362f27348894a139cf80a8ea6449984", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fdd0624-2edb-4733-8284-225815c07f73", "external-id": "nsx-vlan-transportzone-330", "segmentation_id": 330, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40538458-93", "ovs_interfaceid": "40538458-9321-4a6b-84fb-cd5d7205408d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1909.892691] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-9a44e086-1f29-48cf-b30a-8fdb8d7c45d3 req-3398a95b-a13f-4a93-84b6-0e63e1dda080 service nova] Expecting reply to msg 3ddca90e013040d7bac3bcb94e058254 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1909.901290] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ddca90e013040d7bac3bcb94e058254 [ 1909.901973] env[62740]: DEBUG oslo_concurrency.lockutils [req-9a44e086-1f29-48cf-b30a-8fdb8d7c45d3 req-3398a95b-a13f-4a93-84b6-0e63e1dda080 service nova] Releasing lock "refresh_cache-b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1921.890453] env[62740]: DEBUG 
oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1921.890837] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1922.886892] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1922.890521] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1922.890849] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Starting heal instance info cache {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 1922.890849] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Rebuilding the list of instances to heal {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 1922.891490] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg ccd0bd1775034108b6eb228c88a4e75d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1922.910273] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ccd0bd1775034108b6eb228c88a4e75d [ 1922.912427] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1922.912583] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1922.912720] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1922.912851] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1922.912977] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Skipping network cache update for instance because it is Building. 
{{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1922.913117] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1922.913242] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1922.913366] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1922.913489] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1922.913610] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1922.913730] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Didn't find any instances for network info cache update. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 1924.890519] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1924.890858] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1924.892290] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62740) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 1925.891667] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager.update_available_resource {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1925.892085] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 4d3ba153339846fb903db89a631f1b6d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1925.903079] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4d3ba153339846fb903db89a631f1b6d [ 1925.904090] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1925.904308] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1925.904474] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1925.904629] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62740) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1925.905714] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72a9bf46-a757-40a0-91bb-35ac6a5a6ff0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.914699] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9379bb2-c132-45f2-aa58-8820d7b83373 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.928617] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e92e1bdd-3b34-47e7-b78a-78c270d60958 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.934692] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b09d1cac-5d2d-4d46-80f0-f4d8ce0d1865 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.963932] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181633MB free_disk=90GB free_vcpus=48 pci_devices=None {{(pid=62740) _report_hypervisor_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1925.964107] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1925.964307] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1925.965130] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 86f6c4959fb84bd886a91875e271daf0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1925.998840] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 86f6c4959fb84bd886a91875e271daf0 [ 1926.002844] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 9ffb289259ab428c8def0495b7d67b19 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1926.011979] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ffb289259ab428c8def0495b7d67b19 [ 1926.034741] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 86c57375-8328-4344-b228-2f1ce6efc71e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1926.035784] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance a41506d2-33b2-40b8-badb-41312c7abbd2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1926.035784] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 3aa2858e-d422-408a-a83a-98382f971add actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1926.035784] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1926.035784] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1926.035784] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 9a595940-16ba-401a-922f-331cf87093c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1926.036014] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance d2fb90b7-1618-4f07-8854-81566887a7cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1926.036048] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 19f27c16-45b3-47d8-acf0-18255844431f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1926.036149] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 4ee71d81-7d8b-42f8-a27c-b4645169fa3e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1926.036262] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1926.036836] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg a6044d46e18748d29f99a9109994196d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1926.046659] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a6044d46e18748d29f99a9109994196d [ 1926.047418] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 1b975b29-fbaa-4385-9bf9-33496b4ed129 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1926.047645] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1926.047797] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1926.178978] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8055647a-86f1-453b-8d04-357fc4e1b748 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.186825] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-813a4972-3bd5-4a4f-a42e-c2b940e62e76 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.215402] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f469a3a-ba2b-4f4f-bc00-55c6168c5d28 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.222491] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bfbf62d-8f2b-42d7-b2a4-646fc94113f0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.241023] env[62740]: DEBUG nova.compute.provider_tree [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1926.241023] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg ff8627ab730f401b8a8829a69debb593 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1926.248116] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ff8627ab730f401b8a8829a69debb593 [ 1926.249365] env[62740]: DEBUG nova.scheduler.client.report [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1926.253303] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 267b8571c9c94548b10ea26f38f37231 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1926.270363] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 267b8571c9c94548b10ea26f38f37231 [ 
1926.271174] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62740) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1926.271420] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.307s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1927.270945] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1927.891455] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1951.932471] env[62740]: WARNING oslo_vmware.rw_handles [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1951.932471] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1951.932471] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1951.932471] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1951.932471] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1951.932471] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 1951.932471] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1951.932471] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1951.932471] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1951.932471] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1951.932471] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1951.932471] env[62740]: ERROR oslo_vmware.rw_handles [ 1951.933224] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/1b05221f-daa7-4273-87fb-119eff6b910e/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1951.935033] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] 
Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1951.935281] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Copying Virtual Disk [datastore2] vmware_temp/1b05221f-daa7-4273-87fb-119eff6b910e/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore2] vmware_temp/1b05221f-daa7-4273-87fb-119eff6b910e/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1951.935576] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0c7fc59c-b026-4d96-b20a-3a38932e412e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.943218] env[62740]: DEBUG oslo_vmware.api [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Waiting for the task: (returnval){ [ 1951.943218] env[62740]: value = "task-640338" [ 1951.943218] env[62740]: _type = "Task" [ 1951.943218] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1951.950844] env[62740]: DEBUG oslo_vmware.api [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Task: {'id': task-640338, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.454107] env[62740]: DEBUG oslo_vmware.exceptions [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Fault InvalidArgument not matched. 
{{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1952.454400] env[62740]: DEBUG oslo_concurrency.lockutils [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1952.455118] env[62740]: ERROR nova.compute.manager [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1952.455118] env[62740]: Faults: ['InvalidArgument'] [ 1952.455118] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Traceback (most recent call last): [ 1952.455118] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1952.455118] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] yield resources [ 1952.455118] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1952.455118] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] self.driver.spawn(context, instance, image_meta, [ 1952.455118] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1952.455118] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1952.455118] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1952.455118] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] self._fetch_image_if_missing(context, vi) [ 1952.455118] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1952.455118] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] image_cache(vi, tmp_image_ds_loc) [ 1952.455118] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1952.455118] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] vm_util.copy_virtual_disk( [ 1952.455118] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1952.455118] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] session._wait_for_task(vmdk_copy_task) [ 1952.455118] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1952.455118] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] return self.wait_for_task(task_ref) [ 1952.455118] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1952.455118] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] return evt.wait() [ 1952.455118] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1952.455118] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] result = hub.switch() [ 1952.455118] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1952.455118] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] return self.greenlet.switch() [ 1952.455118] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1952.455118] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] self.f(*self.args, **self.kw) [ 1952.455118] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1952.455118] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] raise exceptions.translate_fault(task_info.error) [ 1952.455118] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1952.455118] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Faults: ['InvalidArgument'] [ 1952.455118] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] [ 1952.455886] env[62740]: INFO nova.compute.manager [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Terminating instance [ 1952.456877] env[62740]: DEBUG oslo_concurrency.lockutils [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1952.457099] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1952.457380] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0889ea71-14e2-440d-b717-5b43f665dc9b {{(pid=62740) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.459774] env[62740]: DEBUG nova.compute.manager [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1952.459968] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1952.460755] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cd04f52-e260-40c9-b70e-d678e8803576 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.467114] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1952.467331] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ff28ac2e-78cc-4933-a7ea-ab69ddf7a9b0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.469512] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1952.469687] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1952.470623] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-808496e3-c590-4bcb-a7fa-435c2388b4da {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.475222] env[62740]: DEBUG oslo_vmware.api [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Waiting for the task: (returnval){ [ 1952.475222] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]524bfaa7-6750-186d-bb83-a1cabb14b663" [ 1952.475222] env[62740]: _type = "Task" [ 1952.475222] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1952.481974] env[62740]: DEBUG oslo_vmware.api [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]524bfaa7-6750-186d-bb83-a1cabb14b663, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.539053] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1952.539279] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1952.539456] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Deleting the datastore file [datastore2] 86c57375-8328-4344-b228-2f1ce6efc71e {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1952.539725] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-725f4202-f952-4a5a-8c72-bd2bcafe35f9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.545543] env[62740]: DEBUG oslo_vmware.api [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Waiting for the task: (returnval){ [ 1952.545543] env[62740]: value = "task-640340" [ 1952.545543] env[62740]: _type = "Task" [ 1952.545543] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1952.553576] env[62740]: DEBUG oslo_vmware.api [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Task: {'id': task-640340, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.985405] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1952.985720] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Creating directory with path [datastore2] vmware_temp/4a7f9091-9aa2-421c-a8bc-099381399ae0/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1952.985887] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1de90c10-3db9-4dfa-9df6-55a8e29145ee {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.996569] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Created directory with path [datastore2] vmware_temp/4a7f9091-9aa2-421c-a8bc-099381399ae0/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1952.996761] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Fetch image to [datastore2] vmware_temp/4a7f9091-9aa2-421c-a8bc-099381399ae0/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1952.996927] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/4a7f9091-9aa2-421c-a8bc-099381399ae0/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1952.997660] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20c32e40-b3a7-4d59-8ad1-71e392527791 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.004133] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41a3d45b-c3eb-4b7e-8d52-4bf74dae4c9b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.012830] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cf8804b-d5df-494d-b08a-97d6c4f52fd3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.043375] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da9b8be1-d476-4256-b44b-06a7cf21e0a7 {{(pid=62740) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.050546] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-2a6231e7-7704-4f3a-a69c-eb4e384666a8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.054664] env[62740]: DEBUG oslo_vmware.api [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Task: {'id': task-640340, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.063799} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1953.055201] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1953.055398] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1953.055577] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1953.055898] env[62740]: INFO nova.compute.manager [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Took 0.60 seconds to destroy the instance on the hypervisor. 
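The resource tracker entries above are internally consistent: 10 actively managed instances each hold {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}, and the MEMORY_MB inventory reserves 512 MB for the host, which together reproduce the used_ram=1792MB / used_disk=10GB / used_vcpus=10 figures in the final resource view. A quick arithmetic check (illustrative Python, not Nova code; the last line assumes the usual placement capacity formula (total - reserved) * allocation_ratio):

# Figures copied from the resource tracker / inventory entries above.
instances = 10
per_instance = {"DISK_GB": 1, "MEMORY_MB": 128, "VCPU": 1}
reserved_ram_mb = 512          # 'reserved' in the MEMORY_MB inventory

used_ram_mb = reserved_ram_mb + instances * per_instance["MEMORY_MB"]
used_disk_gb = instances * per_instance["DISK_GB"]
used_vcpus = instances * per_instance["VCPU"]

assert used_ram_mb == 1792     # used_ram=1792MB in the final resource view
assert used_disk_gb == 10      # used_disk=10GB
assert used_vcpus == 10        # used_vcpus=10

# With allocation_ratio 4.0 and nothing reserved, placement will accept up
# to (48 - 0) * 4.0 = 192 VCPUs of allocations against this node.
print(used_ram_mb, used_disk_gb, used_vcpus, int((48 - 0) * 4.0))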
[ 1953.058048] env[62740]: DEBUG nova.compute.claims [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1953.058048] env[62740]: DEBUG oslo_concurrency.lockutils [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1953.058193] env[62740]: DEBUG oslo_concurrency.lockutils [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1953.060124] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 95e608ab4f934c37b47b846750753440 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1953.076916] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1953.103928] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 95e608ab4f934c37b47b846750753440 [ 1953.128350] env[62740]: DEBUG oslo_vmware.rw_handles [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4a7f9091-9aa2-421c-a8bc-099381399ae0/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1953.188898] env[62740]: DEBUG oslo_vmware.rw_handles [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Completed reading data from the image iterator. {{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1953.189084] env[62740]: DEBUG oslo_vmware.rw_handles [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4a7f9091-9aa2-421c-a8bc-099381399ae0/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1953.285906] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3bc1702-83b3-46bb-b1c5-aae2ebb4e857 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.293732] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bfa5ef2-2063-4e62-8244-30b248be55bf {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.322912] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ae4bde0-aa51-4dfb-81ca-1fbdd60ca32a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.329344] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d5fbde3-572f-4069-97e7-e88fb87dc55f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.341461] env[62740]: DEBUG nova.compute.provider_tree [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1953.341917] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg ff9d0676a4174e98990ad2152464f986 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1953.349134] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ff9d0676a4174e98990ad2152464f986 [ 1953.349992] env[62740]: DEBUG nova.scheduler.client.report [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1953.352203] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 4c13411c9f244805ae033508e7c1b8ee in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1953.363035] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4c13411c9f244805ae033508e7c1b8ee [ 1953.363674] env[62740]: DEBUG oslo_concurrency.lockutils [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.305s 
{{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1953.364191] env[62740]: ERROR nova.compute.manager [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1953.364191] env[62740]: Faults: ['InvalidArgument'] [ 1953.364191] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Traceback (most recent call last): [ 1953.364191] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1953.364191] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] self.driver.spawn(context, instance, image_meta, [ 1953.364191] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1953.364191] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1953.364191] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1953.364191] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] self._fetch_image_if_missing(context, vi) [ 1953.364191] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1953.364191] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] image_cache(vi, tmp_image_ds_loc) [ 1953.364191] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1953.364191] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] vm_util.copy_virtual_disk( [ 1953.364191] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1953.364191] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] session._wait_for_task(vmdk_copy_task) [ 1953.364191] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1953.364191] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] return self.wait_for_task(task_ref) [ 1953.364191] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1953.364191] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] return evt.wait() [ 1953.364191] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1953.364191] env[62740]: ERROR 
nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] result = hub.switch() [ 1953.364191] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1953.364191] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] return self.greenlet.switch() [ 1953.364191] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1953.364191] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] self.f(*self.args, **self.kw) [ 1953.364191] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1953.364191] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] raise exceptions.translate_fault(task_info.error) [ 1953.364191] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1953.364191] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Faults: ['InvalidArgument'] [ 1953.364191] env[62740]: ERROR nova.compute.manager [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] [ 1953.364992] env[62740]: DEBUG nova.compute.utils [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1953.366253] env[62740]: DEBUG nova.compute.manager [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Build of instance 86c57375-8328-4344-b228-2f1ce6efc71e was re-scheduled: A specified parameter was not correct: fileType [ 1953.366253] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1953.366642] env[62740]: DEBUG nova.compute.manager [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1953.366849] env[62740]: DEBUG nova.compute.manager [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1953.367058] env[62740]: DEBUG nova.compute.manager [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1953.367210] env[62740]: DEBUG nova.network.neutron [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1953.693318] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg f4c314814afc44f5bf89f3b97b536dcf in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1953.703501] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f4c314814afc44f5bf89f3b97b536dcf [ 1953.704091] env[62740]: DEBUG nova.network.neutron [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1953.704574] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg f4590e95568b4411ac1cdbe3985feae0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1953.717469] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f4590e95568b4411ac1cdbe3985feae0 [ 1953.717720] env[62740]: INFO nova.compute.manager [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Took 0.35 seconds to deallocate network for instance. 
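The traceback above descends from oslo_vmware.api.wait_for_task through an eventlet looping call into _poll_task, which re-raises the vCenter task error as VimFaultException ('A specified parameter was not correct: fileType'). A minimal sketch of that poll-and-translate pattern, in generic Python rather than the oslo.vmware source (TaskFailed and fetch_task_info are hypothetical stand-ins):

import time

class TaskFailed(Exception):
    """Hypothetical stand-in for oslo_vmware.exceptions.VimFaultException."""

def wait_for_task(fetch_task_info, interval=0.5):
    # Poll the task until it leaves the running/queued states, mirroring the
    # loop the traceback walks through: each tick re-reads the task info and
    # either returns the result or surfaces the server-side fault.
    while True:
        info = fetch_task_info()   # e.g. a PropertyCollector read in vSphere
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            # oslo.vmware translates the fault class here (translate_fault);
            # this sketch just wraps it in a generic exception.
            raise TaskFailed(info["error"])
        time.sleep(interval)

# Usage with a fake task that fails the way task-640338's copy did:
states = iter([{"state": "running"},
               {"state": "error", "error": "InvalidArgument: fileType"}])
try:
    wait_for_task(lambda: next(states), interval=0)
except TaskFailed as exc:
    print("task failed:", exc)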
[ 1953.719456] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg aceaf571ec254c60ba703340997b4054 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1953.764035] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aceaf571ec254c60ba703340997b4054 [ 1953.767624] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 341c1cc042e64448ac46799204ff2c00 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1953.796392] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 341c1cc042e64448ac46799204ff2c00 [ 1953.825867] env[62740]: INFO nova.scheduler.client.report [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Deleted allocations for instance 86c57375-8328-4344-b228-2f1ce6efc71e [ 1953.831791] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg a665367c9d814ac1b24f6933f704dc39 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1953.846104] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a665367c9d814ac1b24f6933f704dc39 [ 1953.846662] env[62740]: DEBUG oslo_concurrency.lockutils [None req-30e993af-6981-4356-a6a7-e2ac08500870 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "86c57375-8328-4344-b228-2f1ce6efc71e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 635.507s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1953.847207] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 10ca33f525e3478bb28cf4ddc310ca59 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1953.847912] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2063893a-6abf-47aa-b760-ab1e2c1130eb tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "86c57375-8328-4344-b228-2f1ce6efc71e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 439.402s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1953.848144] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2063893a-6abf-47aa-b760-ab1e2c1130eb tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquiring lock "86c57375-8328-4344-b228-2f1ce6efc71e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1953.848350] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2063893a-6abf-47aa-b760-ab1e2c1130eb tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] 
Lock "86c57375-8328-4344-b228-2f1ce6efc71e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1953.848516] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2063893a-6abf-47aa-b760-ab1e2c1130eb tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "86c57375-8328-4344-b228-2f1ce6efc71e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1953.850458] env[62740]: INFO nova.compute.manager [None req-2063893a-6abf-47aa-b760-ab1e2c1130eb tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Terminating instance [ 1953.852722] env[62740]: DEBUG nova.compute.manager [None req-2063893a-6abf-47aa-b760-ab1e2c1130eb tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1953.855039] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-2063893a-6abf-47aa-b760-ab1e2c1130eb tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1953.855039] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e77258ac-1c6f-409a-9c43-45217dc98b69 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.859118] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 10ca33f525e3478bb28cf4ddc310ca59 [ 1953.859576] env[62740]: DEBUG nova.compute.manager [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Starting instance... 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1953.861279] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 9729cb73859141b78308f037c2576f65 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1953.866387] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aa6aecb-6ef9-45eb-9d80-384b474b9080 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.896234] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9729cb73859141b78308f037c2576f65 [ 1953.896743] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-2063893a-6abf-47aa-b760-ab1e2c1130eb tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 86c57375-8328-4344-b228-2f1ce6efc71e could not be found. [ 1953.896926] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-2063893a-6abf-47aa-b760-ab1e2c1130eb tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1953.897142] env[62740]: INFO nova.compute.manager [None req-2063893a-6abf-47aa-b760-ab1e2c1130eb tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1953.897433] env[62740]: DEBUG oslo.service.loopingcall [None req-2063893a-6abf-47aa-b760-ab1e2c1130eb tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1953.899712] env[62740]: DEBUG nova.compute.manager [-] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1953.899819] env[62740]: DEBUG nova.network.neutron [-] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1953.913191] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1953.913464] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1953.914923] env[62740]: INFO nova.compute.claims [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1953.916521] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 939fbdce13ef4fb1ade6bd52850294d9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1953.928061] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg d41d4d8cd63d47cdb39c040d9077fc54 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1953.934122] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d41d4d8cd63d47cdb39c040d9077fc54 [ 1953.934494] env[62740]: DEBUG nova.network.neutron [-] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1953.934864] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 65f98d0a1b2442d1846ba1171b8cc1bd in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1953.946480] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 65f98d0a1b2442d1846ba1171b8cc1bd [ 1953.946965] env[62740]: INFO nova.compute.manager [-] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] Took 0.05 seconds to deallocate network for instance. 
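The recurring 'Acquiring lock "compute_resources" ... / acquired ... waited 0.000s / "released" ... held ...s' triplets come from oslo.concurrency's synchronized wrapper (the inner frames in lockutils.py), which serializes instance_claim, abort_instance_claim and _update_available_resource on a single semaphore. A minimal sketch using the real oslo_concurrency API (the function bodies are placeholders; with DEBUG logging configured, calls emit the same waited/held messages seen above):

from oslo_concurrency import lockutils

@lockutils.synchronized("compute_resources")
def instance_claim():
    # Runs with the semaphore held; concurrent claims and resource updates
    # queue behind it, which is why the log reports 'waited' times on
    # acquire and 'held' times on release.
    pass

@lockutils.synchronized("compute_resources")
def update_available_resource():
    pass

instance_claim()
update_available_resource()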
[ 1953.950522] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2063893a-6abf-47aa-b760-ab1e2c1130eb tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 9f82aa17a14143eeae489b6c0f4dd113 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1953.955806] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 939fbdce13ef4fb1ade6bd52850294d9 [ 1953.957323] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 480cf0f616c542aa9442b6c6fc2fc60a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1953.963600] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 480cf0f616c542aa9442b6c6fc2fc60a [ 1953.978725] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f82aa17a14143eeae489b6c0f4dd113 [ 1953.990710] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2063893a-6abf-47aa-b760-ab1e2c1130eb tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 6b8ad737baf044d4814494ecb8923651 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1954.028237] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6b8ad737baf044d4814494ecb8923651 [ 1954.030964] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2063893a-6abf-47aa-b760-ab1e2c1130eb tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "86c57375-8328-4344-b228-2f1ce6efc71e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.183s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1954.031299] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2063893a-6abf-47aa-b760-ab1e2c1130eb tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 9d291d449557490d8897d8018b737c2e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1954.031953] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "86c57375-8328-4344-b228-2f1ce6efc71e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 144.784s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1954.032152] env[62740]: INFO nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 86c57375-8328-4344-b228-2f1ce6efc71e] During sync_power_state the instance has a pending task (deleting). Skip. 
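The 'During sync_power_state the instance has a pending task (deleting). Skip.' line reflects a guard in the periodic power-state sync: an instance that is mid-operation is left alone so the sync cannot race the task that owns it. A condensed, illustrative sketch of that decision (query_power_state and the dict-shaped instance are hypothetical stand-ins for the Nova internals):

def sync_power_state(instance, query_power_state):
    # Skip instances with a pending task, as the log does for the instance
    # being deleted above: acting on a stale power state here could undo or
    # collide with the in-flight operation.
    if instance["task_state"] is not None:
        print("During sync_power_state the instance has a pending task "
              "(%s). Skip." % instance["task_state"])
        return
    driver_state = query_power_state(instance["uuid"])
    if instance["power_state"] != driver_state:
        # Out of sync: Nova would record the driver's view and may take
        # corrective action (e.g. powering the VM back off).
        print("power state drifted: %s -> %s"
              % (instance["power_state"], driver_state))

sync_power_state(
    {"uuid": "86c57375-8328-4344-b228-2f1ce6efc71e",
     "task_state": "deleting", "power_state": 1},
    query_power_state=lambda uuid: 1)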
[ 1954.032918] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "86c57375-8328-4344-b228-2f1ce6efc71e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1954.041893] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d291d449557490d8897d8018b737c2e [ 1954.085895] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-872bc99b-0023-4f69-a4c3-293c5c51da87 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.093652] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ee328f7-aa88-4471-bd84-662dec16a72e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.123743] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-741d9412-0527-44df-a2d7-92067de27c9b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.130532] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6331e6a7-7a2a-4a19-818c-69c135843486 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.144081] env[62740]: DEBUG nova.compute.provider_tree [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1954.144562] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 57e4bd80449f4573bf833a1fac2e4db5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1954.151819] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 57e4bd80449f4573bf833a1fac2e4db5 [ 1954.152714] env[62740]: DEBUG nova.scheduler.client.report [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1954.155026] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 27510973e0604f12bd565cbc8e7543d8 in queue reply_30cb6e3d754a4ebf9cedab7950709402
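The repeated "Expecting reply to msg <id> in queue reply_..." / "Received RPC response for msg <id>" pairs are oslo.messaging's AMQP driver correlating RPC responses with callers: every caller on a transport shares one reply queue, and a per-call message id routes each response to the thread waiting on it. A stdlib-only sketch of the idea, not oslo.messaging's actual implementation:

import queue
import uuid


class ReplyWaiter:
    """Match responses arriving on one shared reply queue to pending calls."""

    def __init__(self):
        self.reply_queue = "reply_%s" % uuid.uuid4().hex
        self._pending = {}  # msg_id -> Queue the caller blocks on

    def expect(self):
        # "Expecting reply to msg <id> in queue <reply_queue>"
        msg_id = uuid.uuid4().hex
        self._pending[msg_id] = queue.Queue()
        return msg_id

    def on_incoming(self, msg_id, payload):
        # The consumer thread routes each response by its correlation id:
        # "Received RPC response for msg <id>"
        self._pending[msg_id].put(payload)

    def wait(self, msg_id, timeout=60.0):
        try:
            return self._pending[msg_id].get(timeout=timeout)
        finally:
            del self._pending[msg_id]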
[ 1954.166714] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 27510973e0604f12bd565cbc8e7543d8 [ 1954.167394] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.254s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1954.167844] env[62740]: DEBUG nova.compute.manager [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Start building networks asynchronously for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1954.169499] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 20156c32e6f747c7adfa7d816f4dd1ca in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1954.198362] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 20156c32e6f747c7adfa7d816f4dd1ca [ 1954.199934] env[62740]: DEBUG nova.compute.utils [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1954.200526] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg ace25f6393a94b9db773382a02f32df2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1954.201588] env[62740]: DEBUG nova.compute.manager [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1954.201682] env[62740]: DEBUG nova.network.neutron [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1954.210896] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ace25f6393a94b9db773382a02f32df2 [ 1954.211441] env[62740]: DEBUG nova.compute.manager [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Start building block device mappings for instance.
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1954.212989] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg c6d0fbec2be54806bbb11cb15cb8a7b5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1954.242308] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c6d0fbec2be54806bbb11cb15cb8a7b5 [ 1954.244815] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 91c0c85e33a6472393563d40afc18c2b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1954.260386] env[62740]: DEBUG nova.policy [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '96885eac4aea4c049695f190c31b8b0e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c5b3436283d24d41ae0e599a35d1850c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 1954.273033] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 91c0c85e33a6472393563d40afc18c2b [ 1954.274138] env[62740]: DEBUG nova.compute.manager [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Start spawning the instance on the hypervisor. 
{{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1954.300012] env[62740]: DEBUG nova.virt.hardware [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1954.300246] env[62740]: DEBUG nova.virt.hardware [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1954.300411] env[62740]: DEBUG nova.virt.hardware [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1954.300598] env[62740]: DEBUG nova.virt.hardware [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1954.300749] env[62740]: DEBUG nova.virt.hardware [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1954.300899] env[62740]: DEBUG nova.virt.hardware [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1954.301220] env[62740]: DEBUG nova.virt.hardware [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1954.301434] env[62740]: DEBUG nova.virt.hardware [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1954.301852] 
env[62740]: DEBUG nova.virt.hardware [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1954.301852] env[62740]: DEBUG nova.virt.hardware [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1954.302022] env[62740]: DEBUG nova.virt.hardware [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1954.302902] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-274ced40-ae8f-4b5f-9942-d251c4f5cf15 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.311097] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f1bb71e-0051-4400-97b5-8b0b300d1ded {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.558645] env[62740]: DEBUG nova.network.neutron [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Successfully created port: 64e57883-7826-4245-a4c8-c82deb514e5f {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1955.416019] env[62740]: DEBUG nova.network.neutron [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Successfully updated port: 64e57883-7826-4245-a4c8-c82deb514e5f {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1955.416019] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 4079706c050c47139813e7b9708b4a4f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1955.424646] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4079706c050c47139813e7b9708b4a4f [ 1955.425768] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquiring lock "refresh_cache-1b975b29-fbaa-4385-9bf9-33496b4ed129" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1955.425768] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquired lock "refresh_cache-1b975b29-fbaa-4385-9bf9-33496b4ed129" {{(pid=62740) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1955.425768] env[62740]: DEBUG nova.network.neutron [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1955.426057] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg cefbef0fab284ad9af81ab8a29530bf6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1955.435835] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cefbef0fab284ad9af81ab8a29530bf6 [ 1955.471058] env[62740]: DEBUG nova.network.neutron [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1955.639220] env[62740]: DEBUG nova.network.neutron [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Updating instance_info_cache with network_info: [{"id": "64e57883-7826-4245-a4c8-c82deb514e5f", "address": "fa:16:3e:d9:77:8f", "network": {"id": "2df153a5-47a3-46f8-96b7-aa7f4f657f55", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1427651514-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c5b3436283d24d41ae0e599a35d1850c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64e57883-78", "ovs_interfaceid": "64e57883-7826-4245-a4c8-c82deb514e5f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1955.639837] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 2f6091992b6442e9ba380b74eb3bed7b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1955.651225] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2f6091992b6442e9ba380b74eb3bed7b [ 1955.651811] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] 
Releasing lock "refresh_cache-1b975b29-fbaa-4385-9bf9-33496b4ed129" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1955.652121] env[62740]: DEBUG nova.compute.manager [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Instance network_info: |[{"id": "64e57883-7826-4245-a4c8-c82deb514e5f", "address": "fa:16:3e:d9:77:8f", "network": {"id": "2df153a5-47a3-46f8-96b7-aa7f4f657f55", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1427651514-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c5b3436283d24d41ae0e599a35d1850c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64e57883-78", "ovs_interfaceid": "64e57883-7826-4245-a4c8-c82deb514e5f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1955.652518] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d9:77:8f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4a2b284a-a29c-478f-b763-c9b5821e20ec', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '64e57883-7826-4245-a4c8-c82deb514e5f', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1955.660158] env[62740]: DEBUG oslo.service.loopingcall [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1955.660599] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1955.660825] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-83417165-caf8-450c-964e-c378ce9d7ce6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.681560] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1955.681560] env[62740]: value = "task-640341" [ 1955.681560] env[62740]: _type = "Task" [ 1955.681560] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1955.688779] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640341, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.796186] env[62740]: DEBUG nova.compute.manager [req-43ef2930-c586-45a3-847f-d5fda4b384c7 req-e3dadf09-3f1b-4a51-bdcb-798d687f493d service nova] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Received event network-vif-plugged-64e57883-7826-4245-a4c8-c82deb514e5f {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1955.796273] env[62740]: DEBUG oslo_concurrency.lockutils [req-43ef2930-c586-45a3-847f-d5fda4b384c7 req-e3dadf09-3f1b-4a51-bdcb-798d687f493d service nova] Acquiring lock "1b975b29-fbaa-4385-9bf9-33496b4ed129-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1955.796488] env[62740]: DEBUG oslo_concurrency.lockutils [req-43ef2930-c586-45a3-847f-d5fda4b384c7 req-e3dadf09-3f1b-4a51-bdcb-798d687f493d service nova] Lock "1b975b29-fbaa-4385-9bf9-33496b4ed129-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1955.796605] env[62740]: DEBUG oslo_concurrency.lockutils [req-43ef2930-c586-45a3-847f-d5fda4b384c7 req-e3dadf09-3f1b-4a51-bdcb-798d687f493d service nova] Lock "1b975b29-fbaa-4385-9bf9-33496b4ed129-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1955.796789] env[62740]: DEBUG nova.compute.manager [req-43ef2930-c586-45a3-847f-d5fda4b384c7 req-e3dadf09-3f1b-4a51-bdcb-798d687f493d service nova] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] No waiting events found dispatching network-vif-plugged-64e57883-7826-4245-a4c8-c82deb514e5f {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1955.796954] env[62740]: WARNING nova.compute.manager [req-43ef2930-c586-45a3-847f-d5fda4b384c7 req-e3dadf09-3f1b-4a51-bdcb-798d687f493d service nova] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Received unexpected event network-vif-plugged-64e57883-7826-4245-a4c8-c82deb514e5f for instance with vm_state building and task_state spawning. [ 1955.797310] env[62740]: DEBUG nova.compute.manager [req-43ef2930-c586-45a3-847f-d5fda4b384c7 req-e3dadf09-3f1b-4a51-bdcb-798d687f493d service nova] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Received event network-changed-64e57883-7826-4245-a4c8-c82deb514e5f {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1955.797493] env[62740]: DEBUG nova.compute.manager [req-43ef2930-c586-45a3-847f-d5fda4b384c7 req-e3dadf09-3f1b-4a51-bdcb-798d687f493d service nova] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Refreshing instance network info cache due to event network-changed-64e57883-7826-4245-a4c8-c82deb514e5f.
{{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1955.797688] env[62740]: DEBUG oslo_concurrency.lockutils [req-43ef2930-c586-45a3-847f-d5fda4b384c7 req-e3dadf09-3f1b-4a51-bdcb-798d687f493d service nova] Acquiring lock "refresh_cache-1b975b29-fbaa-4385-9bf9-33496b4ed129" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1955.797831] env[62740]: DEBUG oslo_concurrency.lockutils [req-43ef2930-c586-45a3-847f-d5fda4b384c7 req-e3dadf09-3f1b-4a51-bdcb-798d687f493d service nova] Acquired lock "refresh_cache-1b975b29-fbaa-4385-9bf9-33496b4ed129" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1955.797993] env[62740]: DEBUG nova.network.neutron [req-43ef2930-c586-45a3-847f-d5fda4b384c7 req-e3dadf09-3f1b-4a51-bdcb-798d687f493d service nova] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Refreshing network info cache for port 64e57883-7826-4245-a4c8-c82deb514e5f {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1955.798488] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-43ef2930-c586-45a3-847f-d5fda4b384c7 req-e3dadf09-3f1b-4a51-bdcb-798d687f493d service nova] Expecting reply to msg d25153e720f84f83bf498351437dbc5e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1955.807823] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d25153e720f84f83bf498351437dbc5e [ 1956.183042] env[62740]: DEBUG nova.network.neutron [req-43ef2930-c586-45a3-847f-d5fda4b384c7 req-e3dadf09-3f1b-4a51-bdcb-798d687f493d service nova] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Updated VIF entry in instance network info cache for port 64e57883-7826-4245-a4c8-c82deb514e5f. 
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1956.183427] env[62740]: DEBUG nova.network.neutron [req-43ef2930-c586-45a3-847f-d5fda4b384c7 req-e3dadf09-3f1b-4a51-bdcb-798d687f493d service nova] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Updating instance_info_cache with network_info: [{"id": "64e57883-7826-4245-a4c8-c82deb514e5f", "address": "fa:16:3e:d9:77:8f", "network": {"id": "2df153a5-47a3-46f8-96b7-aa7f4f657f55", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1427651514-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c5b3436283d24d41ae0e599a35d1850c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64e57883-78", "ovs_interfaceid": "64e57883-7826-4245-a4c8-c82deb514e5f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1956.183956] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-43ef2930-c586-45a3-847f-d5fda4b384c7 req-e3dadf09-3f1b-4a51-bdcb-798d687f493d service nova] Expecting reply to msg 3479ffeda065447cba63149d5ee9194f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1956.194290] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640341, 'name': CreateVM_Task, 'duration_secs': 0.273726} completed successfully. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.194796] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3479ffeda065447cba63149d5ee9194f [ 1956.195167] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1956.195611] env[62740]: DEBUG oslo_concurrency.lockutils [req-43ef2930-c586-45a3-847f-d5fda4b384c7 req-e3dadf09-3f1b-4a51-bdcb-798d687f493d service nova] Releasing lock "refresh_cache-1b975b29-fbaa-4385-9bf9-33496b4ed129" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1956.196356] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1956.196517] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1956.196924] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1956.197448] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f827a8c-7525-44ed-9c61-3a9b465960de {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.202245] env[62740]: DEBUG oslo_vmware.api [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Waiting for the task: (returnval){ [ 1956.202245] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]523e7607-8985-be48-2604-0eae052ac71b" [ 1956.202245] env[62740]: _type = "Task" [ 1956.202245] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1956.210278] env[62740]: DEBUG oslo_vmware.api [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]523e7607-8985-be48-2604-0eae052ac71b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.712828] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1956.713159] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1956.713301] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1963.260345] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-b95bda5f-b33d-472d-b90b-e97e5e39634f tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg c8d755c8be1647468f704e2f42175eea in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1963.273651] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c8d755c8be1647468f704e2f42175eea [ 1963.274300] env[62740]: DEBUG oslo_concurrency.lockutils [None req-b95bda5f-b33d-472d-b90b-e97e5e39634f tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Acquiring lock "b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1970.196101] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 596ced8d55124a3a9ebd7a89fd4815d1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1970.208044] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 596ced8d55124a3a9ebd7a89fd4815d1 [ 1981.890600] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1982.890594] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1982.890952] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Starting heal instance info cache {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 1982.890997] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Rebuilding the list of instances to heal {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}}
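The "Running periodic task ComputeManager._heal_instance_info_cache" entries (and the nearby _poll_* tasks) come from oslo.service's periodic task machinery: methods decorated as periodic tasks are collected on the manager class, and the service loop invokes them on their schedule via run_periodic_tasks(). A minimal sketch of how such a manager is wired up, with placeholder bodies (MiniComputeManager is illustrative, not Nova's class):

from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF


class MiniComputeManager(periodic_task.PeriodicTasks):
    """Illustrative stand-in for nova.compute.manager.ComputeManager."""

    def __init__(self):
        super().__init__(CONF)

    @periodic_task.periodic_task
    def _heal_instance_info_cache(self, context):
        # Rebuild the list of instances to heal, skipping any still Building,
        # exactly as the entries that follow report.
        pass

    @periodic_task.periodic_task(spacing=60)
    def _poll_volume_usage(self, context):
        pass

# The service loop then drives these with manager.run_periodic_tasks(context).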
[ 1982.891623] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg f392445c9d234c7d997336efe3c645a2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1982.911453] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f392445c9d234c7d997336efe3c645a2 [ 1982.913551] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1982.913706] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1982.913840] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1982.913969] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1982.914107] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1982.914230] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1982.914350] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1982.914468] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1982.914585] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1982.914702] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Skipping network cache update for instance because it is Building.
{{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1982.914821] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Didn't find any instances for network info cache update. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 1982.915323] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1983.911119] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1984.890135] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1984.890323] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62740) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 1985.891612] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager.update_available_resource {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1985.891939] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 51f6f4c91094450fac443f15f153ff09 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1985.903793] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 51f6f4c91094450fac443f15f153ff09 [ 1985.904781] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1985.904998] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1985.905188] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1985.905342] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62740) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1985.906521] env[62740]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abe8658a-0ce6-4232-bba8-8d5081d49168 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.915819] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f127cfb-7b17-41a7-921b-b7ff68d8ac8d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.929625] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f303c16-986f-4fc0-af38-122051fb696e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.935690] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-877630c9-e40a-4b49-b3dd-1b444feab2e9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.963763] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181666MB free_disk=90GB free_vcpus=48 pci_devices=None {{(pid=62740) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1985.963906] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1985.964115] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1985.964934] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 18be81da89e04a68b694e84be74097d8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1986.000297] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 18be81da89e04a68b694e84be74097d8 [ 1986.007162] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 903acfc292644e66a33822d580e272b3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1986.017851] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 903acfc292644e66a33822d580e272b3 [ 1986.041414] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance a41506d2-33b2-40b8-badb-41312c7abbd2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1986.041568] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 3aa2858e-d422-408a-a83a-98382f971add actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1986.041696] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1986.041818] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1986.041939] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 9a595940-16ba-401a-922f-331cf87093c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1986.042068] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance d2fb90b7-1618-4f07-8854-81566887a7cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1986.042188] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 19f27c16-45b3-47d8-acf0-18255844431f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1986.042302] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 4ee71d81-7d8b-42f8-a27c-b4645169fa3e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1986.042415] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1986.042530] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 1b975b29-fbaa-4385-9bf9-33496b4ed129 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1986.042717] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1986.042853] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1986.164803] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cb98a65-9b57-4823-a2a3-bead9212237d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.172383] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce2501bb-7d77-4637-af71-5cba39604ff2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.202270] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed8d98f4-6bb1-44f4-b6a3-e53d8a8cfc44 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.208862] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0743b27-6bb1-4081-ad5e-432252503247 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.221370] env[62740]: DEBUG nova.compute.provider_tree [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1986.221866] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 81e2c69f71a94ecf9755af7021ef6e5d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1986.229719] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 81e2c69f71a94ecf9755af7021ef6e5d [ 1986.230662] env[62740]: DEBUG nova.scheduler.client.report [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1986.233033] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 840b8d1081904e56b34101a938e3b59b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1986.243680] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 840b8d1081904e56b34101a938e3b59b [ 
1986.244334] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62740) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1986.244540] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.280s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1987.243736] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1987.244052] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1989.892611] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1992.887054] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1992.887600] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg e2cd1c2f188642c5bf618c8a3e911959 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 1992.904683] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2cd1c2f188642c5bf618c8a3e911959 [ 2001.553352] env[62740]: WARNING oslo_vmware.rw_handles [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2001.553352] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2001.553352] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2001.553352] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2001.553352] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2001.553352] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 2001.553352] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2001.553352] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2001.553352] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2001.553352] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2001.553352] 
env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2001.553352] env[62740]: ERROR oslo_vmware.rw_handles [ 2001.553990] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/4a7f9091-9aa2-421c-a8bc-099381399ae0/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2001.556331] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2001.556589] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Copying Virtual Disk [datastore2] vmware_temp/4a7f9091-9aa2-421c-a8bc-099381399ae0/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore2] vmware_temp/4a7f9091-9aa2-421c-a8bc-099381399ae0/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2001.556895] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b86456d9-1d38-4de1-b4c2-6cc2a588b53f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.564203] env[62740]: DEBUG oslo_vmware.api [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Waiting for the task: (returnval){ [ 2001.564203] env[62740]: value = "task-640342" [ 2001.564203] env[62740]: _type = "Task" [ 2001.564203] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2001.572044] env[62740]: DEBUG oslo_vmware.api [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Task: {'id': task-640342, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2002.075262] env[62740]: DEBUG oslo_vmware.exceptions [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Fault InvalidArgument not matched. 
{{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2002.075569] env[62740]: DEBUG oslo_concurrency.lockutils [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2002.076156] env[62740]: ERROR nova.compute.manager [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2002.076156] env[62740]: Faults: ['InvalidArgument'] [ 2002.076156] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Traceback (most recent call last): [ 2002.076156] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2002.076156] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] yield resources [ 2002.076156] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2002.076156] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] self.driver.spawn(context, instance, image_meta, [ 2002.076156] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2002.076156] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2002.076156] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2002.076156] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] self._fetch_image_if_missing(context, vi) [ 2002.076156] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2002.076156] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] image_cache(vi, tmp_image_ds_loc) [ 2002.076156] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2002.076156] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] vm_util.copy_virtual_disk( [ 2002.076156] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2002.076156] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] session._wait_for_task(vmdk_copy_task) [ 2002.076156] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 2002.076156] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] return self.wait_for_task(task_ref) [ 2002.076156] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2002.076156] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] return evt.wait() [ 2002.076156] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2002.076156] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] result = hub.switch() [ 2002.076156] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2002.076156] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] return self.greenlet.switch() [ 2002.076156] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2002.076156] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] self.f(*self.args, **self.kw) [ 2002.076156] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2002.076156] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] raise exceptions.translate_fault(task_info.error) [ 2002.076156] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2002.076156] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Faults: ['InvalidArgument'] [ 2002.076156] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] [ 2002.077249] env[62740]: INFO nova.compute.manager [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Terminating instance [ 2002.078080] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2002.078296] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2002.078539] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-78b5b4d5-fcdc-4124-9e45-4ab0417f6a4f {{(pid=62740) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.080689] env[62740]: DEBUG nova.compute.manager [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2002.080885] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2002.081635] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b6bbd29-eece-4da2-ad78-e8d7a678b538 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.088392] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2002.088596] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fd125f59-4b1d-448b-aba1-1b05c0ab7c94 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.090684] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2002.090863] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2002.091826] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7fe091c7-8a1a-4ebb-9aa9-35a16094282e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.096378] env[62740]: DEBUG oslo_vmware.api [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Waiting for the task: (returnval){ [ 2002.096378] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52a8eaba-085e-52e5-d452-3e3498bb6150" [ 2002.096378] env[62740]: _type = "Task" [ 2002.096378] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2002.104826] env[62740]: DEBUG oslo_vmware.api [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52a8eaba-085e-52e5-d452-3e3498bb6150, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2002.227836] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2002.228080] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2002.228266] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Deleting the datastore file [datastore2] a41506d2-33b2-40b8-badb-41312c7abbd2 {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2002.228628] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0f495a9b-1c64-46da-957b-51791c388c37 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.234857] env[62740]: DEBUG oslo_vmware.api [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Waiting for the task: (returnval){ [ 2002.234857] env[62740]: value = "task-640344" [ 2002.234857] env[62740]: _type = "Task" [ 2002.234857] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2002.243691] env[62740]: DEBUG oslo_vmware.api [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Task: {'id': task-640344, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2002.605940] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2002.606268] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Creating directory with path [datastore2] vmware_temp/9059b745-33a2-46d1-8eb2-8c3987faab33/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2002.606473] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d1fbaa96-361c-4a57-baf5-5f39c822c681 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.620060] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Created directory with path [datastore2] vmware_temp/9059b745-33a2-46d1-8eb2-8c3987faab33/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2002.620260] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Fetch image to [datastore2] vmware_temp/9059b745-33a2-46d1-8eb2-8c3987faab33/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2002.620438] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/9059b745-33a2-46d1-8eb2-8c3987faab33/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2002.621212] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec55e435-4305-4052-b5ed-49b9db5b0f00 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.627588] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f88725d9-85af-472b-912f-0a915f50d0f0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.636339] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6381ff0f-9ea0-4dfd-9d1b-3185deff3d2e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.666014] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-9f8499f6-98a5-436e-a38d-846092012496 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.671136] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-887f1163-a09e-4c51-9709-53d08f42749a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.692584] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2002.742042] env[62740]: DEBUG oslo_vmware.rw_handles [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9059b745-33a2-46d1-8eb2-8c3987faab33/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2002.746331] env[62740]: DEBUG oslo_vmware.api [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Task: {'id': task-640344, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.108019} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2002.747505] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2002.747711] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2002.747892] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2002.748098] env[62740]: INFO nova.compute.manager [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Took 0.67 seconds to destroy the instance on the hypervisor. 
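Annotation: the tracebacks above all funnel through the same oslo.vmware task-polling pattern. Nova submits a vCenter task (here CopyVirtualDisk_Task), then wait_for_task() polls it and re-raises any server-side fault locally. Below is a minimal sketch of that pattern, not the oslo.vmware source: session.get_task_info() and poll_interval are hypothetical stand-ins for illustration only.

    import time

    class VimFaultException(Exception):
        """Carries the fault names reported by vCenter, e.g. ['InvalidArgument']."""
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    def wait_for_task(session, task_ref, poll_interval=0.5):
        # Poll until the task reaches a terminal state, matching the
        # "Task: {'id': task-640342, ...} progress is 0%" entries above.
        while True:
            info = session.get_task_info(task_ref)  # hypothetical helper
            if info.state == "success":
                return info.result
            if info.state == "error":
                # The server-side fault is translated into a local exception;
                # this is how "A specified parameter was not correct: fileType"
                # surfaces inside Nova's _fetch_image_if_missing() call chain.
                raise VimFaultException(info.error.faults, info.error.message)
            time.sleep(poll_interval)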
[ 2002.803574] env[62740]: DEBUG nova.compute.claims [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2002.803749] env[62740]: DEBUG oslo_concurrency.lockutils [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2002.803984] env[62740]: DEBUG oslo_concurrency.lockutils [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2002.806043] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg d6c444da69fe4e06bc10c2a9c0e57a0a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2002.809216] env[62740]: DEBUG oslo_vmware.rw_handles [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Completed reading data from the image iterator. {{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2002.809216] env[62740]: DEBUG oslo_vmware.rw_handles [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9059b745-33a2-46d1-8eb2-8c3987faab33/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2002.843612] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d6c444da69fe4e06bc10c2a9c0e57a0a [ 2002.968258] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5dbc5ea-0000-44e6-a466-27027cf233e6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.975126] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6171f447-dfae-408a-9986-a2ffc9c4e049 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.005228] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a2a4166-e411-4988-b1bc-45075e921b69 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.011850] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c292068f-9f35-4256-90ad-ebfed33ab1ac {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.024246] env[62740]: DEBUG nova.compute.provider_tree [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2003.024722] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg a3afc6fddaa94d54801a1cbe6b476050 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2003.032155] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a3afc6fddaa94d54801a1cbe6b476050 [ 2003.032997] env[62740]: DEBUG nova.scheduler.client.report [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2003.035227] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg 30a164ca0572484ea81586e8a069722f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2003.045318] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30a164ca0572484ea81586e8a069722f [ 2003.046018] env[62740]: DEBUG oslo_concurrency.lockutils [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.242s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2003.046550] env[62740]: ERROR nova.compute.manager [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2003.046550] env[62740]: Faults: ['InvalidArgument'] [ 2003.046550] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Traceback (most recent call last): [ 2003.046550] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2003.046550] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] self.driver.spawn(context, instance, image_meta, [ 2003.046550] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2003.046550] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2003.046550] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2003.046550] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] self._fetch_image_if_missing(context, vi) [ 2003.046550] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2003.046550] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] image_cache(vi, tmp_image_ds_loc) [ 2003.046550] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2003.046550] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] vm_util.copy_virtual_disk( [ 2003.046550] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2003.046550] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] session._wait_for_task(vmdk_copy_task) [ 2003.046550] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2003.046550] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] return self.wait_for_task(task_ref) [ 2003.046550] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2003.046550] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] return evt.wait() [ 2003.046550] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2003.046550] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] result = hub.switch() [ 2003.046550] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2003.046550] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] return self.greenlet.switch() [ 2003.046550] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2003.046550] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] self.f(*self.args, **self.kw) [ 2003.046550] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2003.046550] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] raise exceptions.translate_fault(task_info.error) [ 2003.046550] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2003.046550] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Faults: ['InvalidArgument'] [ 2003.046550] env[62740]: ERROR nova.compute.manager [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] [ 2003.047267] env[62740]: DEBUG nova.compute.utils [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2003.048644] env[62740]: DEBUG nova.compute.manager [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Build of instance a41506d2-33b2-40b8-badb-41312c7abbd2 was re-scheduled: A specified parameter was not correct: fileType [ 2003.048644] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2003.049084] env[62740]: DEBUG nova.compute.manager [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2003.049267] env[62740]: DEBUG nova.compute.manager [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2003.049435] env[62740]: DEBUG nova.compute.manager [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2003.049601] env[62740]: DEBUG nova.network.neutron [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2003.926259] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg c2f05f5733a242e3b93d709e8492fe7c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2003.935952] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c2f05f5733a242e3b93d709e8492fe7c [ 2003.936556] env[62740]: DEBUG nova.network.neutron [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2003.937042] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg 25fbd38939a44c17b171b6218e61ea0c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2003.945801] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 25fbd38939a44c17b171b6218e61ea0c [ 2003.946397] env[62740]: INFO nova.compute.manager [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Took 0.90 seconds to deallocate network for instance. 
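Annotation: on a failed spawn the entries above show the recovery path: the resource claim is aborted under the "compute_resources" lock, networking is deallocated, and the build is handed back to the scheduler ("was re-scheduled"). A simplified sketch of that control flow follows; the threading.Lock stands in for the oslo.concurrency named lock, and the tracker/scheduler objects are hypothetical, not Nova's actual API.

    import threading

    compute_resources = threading.Lock()  # stand-in for lockutils' named lock

    def build_and_run(instance, driver, tracker, scheduler):
        with compute_resources:
            tracker.instance_claim(instance)  # hypothetical: reserve CPU/RAM/disk
        try:
            driver.spawn(instance)
        except Exception as exc:
            # Failure path seen above: abort the claim so placement inventory
            # stays accurate, then hand the build back to the scheduler.
            with compute_resources:
                tracker.abort_instance_claim(instance)
            scheduler.reschedule(instance, reason=str(exc))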
[ 2003.948027] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg e74b3825a91d48448aa6b0e28072c9ec in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2003.983027] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e74b3825a91d48448aa6b0e28072c9ec [ 2003.985756] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg 0f3f411e78d845818e736d4ecf7afc52 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2004.018844] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0f3f411e78d845818e736d4ecf7afc52 [ 2004.050913] env[62740]: INFO nova.scheduler.client.report [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Deleted allocations for instance a41506d2-33b2-40b8-badb-41312c7abbd2 [ 2004.058175] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg 476091fbfa2d47c4a2819df133336b5a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2004.078438] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 476091fbfa2d47c4a2819df133336b5a [ 2004.078438] env[62740]: DEBUG oslo_concurrency.lockutils [None req-bbc2cd97-4729-443a-b5dd-4b3332adebf0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Lock "a41506d2-33b2-40b8-badb-41312c7abbd2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 629.253s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2004.078438] env[62740]: DEBUG oslo_concurrency.lockutils [None req-681ae6d4-3a31-4a41-806f-7f84608cc6a0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Lock "a41506d2-33b2-40b8-badb-41312c7abbd2" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 433.620s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2004.078438] env[62740]: DEBUG oslo_concurrency.lockutils [None req-681ae6d4-3a31-4a41-806f-7f84608cc6a0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Acquiring lock "a41506d2-33b2-40b8-badb-41312c7abbd2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2004.078438] env[62740]: DEBUG oslo_concurrency.lockutils [None req-681ae6d4-3a31-4a41-806f-7f84608cc6a0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Lock "a41506d2-33b2-40b8-badb-41312c7abbd2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2004.078438] env[62740]: DEBUG oslo_concurrency.lockutils [None
req-681ae6d4-3a31-4a41-806f-7f84608cc6a0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Lock "a41506d2-33b2-40b8-badb-41312c7abbd2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2004.080429] env[62740]: INFO nova.compute.manager [None req-681ae6d4-3a31-4a41-806f-7f84608cc6a0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Terminating instance [ 2004.082300] env[62740]: DEBUG nova.compute.manager [None req-681ae6d4-3a31-4a41-806f-7f84608cc6a0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2004.082499] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-681ae6d4-3a31-4a41-806f-7f84608cc6a0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2004.082976] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2528e6a5-e399-4323-be56-f187f41ca00e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.092898] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26ed4bdc-c621-4db8-bef8-d207dcf5232b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.120121] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-681ae6d4-3a31-4a41-806f-7f84608cc6a0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a41506d2-33b2-40b8-badb-41312c7abbd2 could not be found. [ 2004.120326] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-681ae6d4-3a31-4a41-806f-7f84608cc6a0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2004.120505] env[62740]: INFO nova.compute.manager [None req-681ae6d4-3a31-4a41-806f-7f84608cc6a0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2004.120749] env[62740]: DEBUG oslo.service.loopingcall [None req-681ae6d4-3a31-4a41-806f-7f84608cc6a0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2004.120962] env[62740]: DEBUG nova.compute.manager [-] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2004.121073] env[62740]: DEBUG nova.network.neutron [-] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2004.139402] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 73537801ec104b0cab7cfcf8e7cfdd3c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2004.145377] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 73537801ec104b0cab7cfcf8e7cfdd3c [ 2004.145719] env[62740]: DEBUG nova.network.neutron [-] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2004.146090] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 852690f842184b9b8d896f0fcdf887d2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2004.153277] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 852690f842184b9b8d896f0fcdf887d2 [ 2004.153727] env[62740]: INFO nova.compute.manager [-] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] Took 0.03 seconds to deallocate network for instance. [ 2004.157037] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-681ae6d4-3a31-4a41-806f-7f84608cc6a0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg 1cbc4686f09b4619a64e63060f26fa44 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2004.182561] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1cbc4686f09b4619a64e63060f26fa44 [ 2004.196055] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-681ae6d4-3a31-4a41-806f-7f84608cc6a0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg 3cac082580f14714924689ef7be46f0d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2004.234350] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3cac082580f14714924689ef7be46f0d [ 2004.236996] env[62740]: DEBUG oslo_concurrency.lockutils [None req-681ae6d4-3a31-4a41-806f-7f84608cc6a0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Lock "a41506d2-33b2-40b8-badb-41312c7abbd2" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.160s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2004.237371] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-681ae6d4-3a31-4a41-806f-7f84608cc6a0 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg de067da564924c5886326cb81170d7ab in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2004.238399] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "a41506d2-33b2-40b8-badb-41312c7abbd2" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 194.990s {{(pid=62740) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2004.238621] env[62740]: INFO nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: a41506d2-33b2-40b8-badb-41312c7abbd2] During sync_power_state the instance has a pending task (deleting). Skip. [ 2004.238867] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "a41506d2-33b2-40b8-badb-41312c7abbd2" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2004.246847] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg de067da564924c5886326cb81170d7ab [ 2010.297887] env[62740]: DEBUG oslo_concurrency.lockutils [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Acquiring lock "b75490e6-ded3-4aa7-89ff-f4963fe82cfe" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2010.298189] env[62740]: DEBUG oslo_concurrency.lockutils [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Lock "b75490e6-ded3-4aa7-89ff-f4963fe82cfe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2010.298607] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg fbc42d2e1ee54076863440b53b9f19d7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2010.309462] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fbc42d2e1ee54076863440b53b9f19d7 [ 2010.309951] env[62740]: DEBUG nova.compute.manager [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Starting instance...
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2010.311556] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 5127d47de043418992f3633181e1d7d9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2010.339413] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5127d47de043418992f3633181e1d7d9 [ 2010.354484] env[62740]: DEBUG oslo_concurrency.lockutils [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2010.354727] env[62740]: DEBUG oslo_concurrency.lockutils [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2010.356167] env[62740]: INFO nova.compute.claims [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2010.357747] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg bc74b51668d744f490de4086bc7ae29e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2010.387113] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc74b51668d744f490de4086bc7ae29e [ 2010.388538] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg b899264f198246c4b1ebdef97896ff00 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2010.395606] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b899264f198246c4b1ebdef97896ff00 [ 2010.513546] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9352a088-3b31-4321-84b6-d5273a175d01 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.521862] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-994edd42-692f-4a41-b224-56a9ee49d9e3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.552555] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74094437-5f18-4aaa-a6e2-13e97b5e0ea6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.559757] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c385309b-fa35-40f6-8b89-731c8316859a {{(pid=62740) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.573496] env[62740]: DEBUG nova.compute.provider_tree [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2010.573966] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 07228db8704a45099ae30e4d279f7f01 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2010.581553] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07228db8704a45099ae30e4d279f7f01 [ 2010.582411] env[62740]: DEBUG nova.scheduler.client.report [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2010.584588] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 06a0a52745d849e19b47af5b85eae1b9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2010.595605] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 06a0a52745d849e19b47af5b85eae1b9 [ 2010.596322] env[62740]: DEBUG oslo_concurrency.lockutils [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.242s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2010.596856] env[62740]: DEBUG nova.compute.manager [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Start building networks asynchronously for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2010.598465] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg c562b65db6d64da4b57a1f687fafcb53 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2010.649605] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c562b65db6d64da4b57a1f687fafcb53 [ 2010.651364] env[62740]: DEBUG nova.compute.utils [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2010.651970] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 3e832c6ec0a34b6ebe9aab1f5a0ac2a7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2010.652995] env[62740]: DEBUG nova.compute.manager [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2010.653189] env[62740]: DEBUG nova.network.neutron [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2010.662285] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e832c6ec0a34b6ebe9aab1f5a0ac2a7 [ 2010.662835] env[62740]: DEBUG nova.compute.manager [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2010.664708] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg ac6a99ae602b447abcfe57ac80e6bcf4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2010.701081] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac6a99ae602b447abcfe57ac80e6bcf4 [ 2010.703938] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg e294de28c12b4035a42024b3d7f5d740 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2010.727098] env[62740]: DEBUG nova.policy [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd3f5aaf4abae42da9a5ad7044f84647d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1d3897fd0522431c87c8830678fd59ae', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 2010.732158] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e294de28c12b4035a42024b3d7f5d740 [ 2010.733223] env[62740]: DEBUG nova.compute.manager [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Start spawning the instance on the hypervisor. 
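[annotation] The "Policy check for network:attach_external_network failed" record is oslo.policy evaluating the request token: a member/reader credential does not satisfy the admin-only rule, so Nova simply will not bind external networks for this port. A sketch of that kind of check with oslo.policy; the default string here is illustrative, not Nova's registered default:

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    # Assumed default for illustration; Nova registers its own defaults.
    enforcer.register_default(
        policy.RuleDefault('network:attach_external_network', 'role:admin'))

    creds = {'roles': ['reader', 'member'],
             'project_id': '1d3897fd0522431c87c8830678fd59ae'}
    # Returns False for this member/reader token, matching the failed check above.
    print(enforcer.enforce('network:attach_external_network', {}, creds))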
{{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2010.758553] env[62740]: DEBUG nova.virt.hardware [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2010.761203] env[62740]: DEBUG nova.virt.hardware [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2010.761203] env[62740]: DEBUG nova.virt.hardware [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2010.761203] env[62740]: DEBUG nova.virt.hardware [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2010.761203] env[62740]: DEBUG nova.virt.hardware [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2010.761203] env[62740]: DEBUG nova.virt.hardware [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2010.761203] env[62740]: DEBUG nova.virt.hardware [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2010.761203] env[62740]: DEBUG nova.virt.hardware [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2010.761203] env[62740]: DEBUG nova.virt.hardware [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 
tempest-ServersTestJSON-1077887089-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2010.761203] env[62740]: DEBUG nova.virt.hardware [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2010.761203] env[62740]: DEBUG nova.virt.hardware [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2010.761985] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2c279cb-ae4f-4dde-9495-5ca41ff6b46d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.771096] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b782c70c-abfe-489e-8482-9419f7514d14 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.106564] env[62740]: DEBUG nova.network.neutron [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Successfully created port: c45929f6-3d1a-46ad-8660-fb37509d331c {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2011.701277] env[62740]: DEBUG nova.compute.manager [req-0cea1b84-d7b0-4667-9f38-5b06ad7041a5 req-b73cb55e-24b2-4e45-b63a-eb24021990bb service nova] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Received event network-vif-plugged-c45929f6-3d1a-46ad-8660-fb37509d331c {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 2011.701526] env[62740]: DEBUG oslo_concurrency.lockutils [req-0cea1b84-d7b0-4667-9f38-5b06ad7041a5 req-b73cb55e-24b2-4e45-b63a-eb24021990bb service nova] Acquiring lock "b75490e6-ded3-4aa7-89ff-f4963fe82cfe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2011.701705] env[62740]: DEBUG oslo_concurrency.lockutils [req-0cea1b84-d7b0-4667-9f38-5b06ad7041a5 req-b73cb55e-24b2-4e45-b63a-eb24021990bb service nova] Lock "b75490e6-ded3-4aa7-89ff-f4963fe82cfe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2011.701876] env[62740]: DEBUG oslo_concurrency.lockutils [req-0cea1b84-d7b0-4667-9f38-5b06ad7041a5 req-b73cb55e-24b2-4e45-b63a-eb24021990bb service nova] Lock "b75490e6-ded3-4aa7-89ff-f4963fe82cfe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2011.702055] env[62740]: DEBUG nova.compute.manager [req-0cea1b84-d7b0-4667-9f38-5b06ad7041a5 req-b73cb55e-24b2-4e45-b63a-eb24021990bb service nova] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] 
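[annotation] With no flavor or image constraints, the limits default to 65536 sockets/cores/threads, and for a 1-vCPU flavor the only factorization is 1:1:1, hence the single VirtCPUTopology logged above. A toy enumerator mirroring the idea behind _get_possible_cpu_topologies (not Nova's implementation):

    # Enumerate sockets*cores*threads factorizations of a vCPU count.
    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        topos = []
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        topos.append((s, c, t))
        return topos

    print(possible_topologies(1))  # [(1, 1, 1)] -- the one topology logged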
No waiting events found dispatching network-vif-plugged-c45929f6-3d1a-46ad-8660-fb37509d331c {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2011.702222] env[62740]: WARNING nova.compute.manager [req-0cea1b84-d7b0-4667-9f38-5b06ad7041a5 req-b73cb55e-24b2-4e45-b63a-eb24021990bb service nova] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Received unexpected event network-vif-plugged-c45929f6-3d1a-46ad-8660-fb37509d331c for instance with vm_state building and task_state spawning. [ 2011.832904] env[62740]: DEBUG nova.network.neutron [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Successfully updated port: c45929f6-3d1a-46ad-8660-fb37509d331c {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2011.832904] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg b4dfa18b33cd4208b911515a98b21b68 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2011.842482] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4dfa18b33cd4208b911515a98b21b68 [ 2011.843316] env[62740]: DEBUG oslo_concurrency.lockutils [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Acquiring lock "refresh_cache-b75490e6-ded3-4aa7-89ff-f4963fe82cfe" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2011.843468] env[62740]: DEBUG oslo_concurrency.lockutils [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Acquired lock "refresh_cache-b75490e6-ded3-4aa7-89ff-f4963fe82cfe" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2011.843618] env[62740]: DEBUG nova.network.neutron [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2011.844020] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg b430623e8beb4f339aa705825d1cdae8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2011.856659] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b430623e8beb4f339aa705825d1cdae8 [ 2011.888857] env[62740]: DEBUG nova.network.neutron [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2012.048501] env[62740]: DEBUG nova.network.neutron [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Updating instance_info_cache with network_info: [{"id": "c45929f6-3d1a-46ad-8660-fb37509d331c", "address": "fa:16:3e:e6:a9:a4", "network": {"id": "fe48b87d-d6bf-41e3-8587-388615fdb42f", "bridge": "br-int", "label": "tempest-ServersTestJSON-590567929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d3897fd0522431c87c8830678fd59ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc45929f6-3d", "ovs_interfaceid": "c45929f6-3d1a-46ad-8660-fb37509d331c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2012.049214] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 3e1365b505154a3ea8d17c3599a4cd40 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2012.059646] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e1365b505154a3ea8d17c3599a4cd40 [ 2012.060352] env[62740]: DEBUG oslo_concurrency.lockutils [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Releasing lock "refresh_cache-b75490e6-ded3-4aa7-89ff-f4963fe82cfe" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2012.060637] env[62740]: DEBUG nova.compute.manager [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Instance network_info: |[{"id": "c45929f6-3d1a-46ad-8660-fb37509d331c", "address": "fa:16:3e:e6:a9:a4", "network": {"id": "fe48b87d-d6bf-41e3-8587-388615fdb42f", "bridge": "br-int", "label": "tempest-ServersTestJSON-590567929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d3897fd0522431c87c8830678fd59ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": 
"nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc45929f6-3d", "ovs_interfaceid": "c45929f6-3d1a-46ad-8660-fb37509d331c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2012.061040] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:a9:a4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3952eb02-1162-48ed-8227-9c138960d583', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c45929f6-3d1a-46ad-8660-fb37509d331c', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2012.068589] env[62740]: DEBUG oslo.service.loopingcall [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2012.069122] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2012.069753] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7051d7fb-a5ad-4070-983d-c247f41b503b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.089350] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2012.089350] env[62740]: value = "task-640345" [ 2012.089350] env[62740]: _type = "Task" [ 2012.089350] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2012.097242] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640345, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2012.600272] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640345, 'name': CreateVM_Task, 'duration_secs': 0.310296} completed successfully. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2012.600424] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2012.601235] env[62740]: DEBUG oslo_concurrency.lockutils [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2012.601419] env[62740]: DEBUG oslo_concurrency.lockutils [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2012.601730] env[62740]: DEBUG oslo_concurrency.lockutils [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2012.601996] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d0c2883-b84c-40f2-8ae5-1c791bf3a55c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.606700] env[62740]: DEBUG oslo_vmware.api [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Waiting for the task: (returnval){ [ 2012.606700] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52726a40-889d-c878-eb13-1c2e9b0e30ae" [ 2012.606700] env[62740]: _type = "Task" [ 2012.606700] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2012.615884] env[62740]: DEBUG oslo_vmware.api [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52726a40-889d-c878-eb13-1c2e9b0e30ae, 'name': SearchDatastore_Task} progress is 0%. 
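[annotation] The "[datastore2] devstack-image-cache_base/..." lock names above show Nova serializing access to the shared image cache by datastore path, so concurrent builds of the same image do not race. A sketch of name-scoped locking with oslo.concurrency; the body is illustrative:

    from oslo_concurrency import lockutils

    cache_key = ('[datastore2] devstack-image-cache_base/'
                 '174f7655-3fb8-458a-8e9c-108936afe738')

    with lockutils.lock(cache_key):
        # fetch-or-reuse the cached VMDK; other workers using the same
        # datastore path block here until the lock is released
        pass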
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2013.116894] env[62740]: DEBUG oslo_concurrency.lockutils [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2013.117874] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2013.117874] env[62740]: DEBUG oslo_concurrency.lockutils [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2013.728139] env[62740]: DEBUG nova.compute.manager [req-1a403ae4-0725-402f-b5da-9b140e5516cc req-3924a570-6232-40ae-b55c-819794031126 service nova] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Received event network-changed-c45929f6-3d1a-46ad-8660-fb37509d331c {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 2013.728355] env[62740]: DEBUG nova.compute.manager [req-1a403ae4-0725-402f-b5da-9b140e5516cc req-3924a570-6232-40ae-b55c-819794031126 service nova] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Refreshing instance network info cache due to event network-changed-c45929f6-3d1a-46ad-8660-fb37509d331c. 
{{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 2013.728575] env[62740]: DEBUG oslo_concurrency.lockutils [req-1a403ae4-0725-402f-b5da-9b140e5516cc req-3924a570-6232-40ae-b55c-819794031126 service nova] Acquiring lock "refresh_cache-b75490e6-ded3-4aa7-89ff-f4963fe82cfe" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2013.728731] env[62740]: DEBUG oslo_concurrency.lockutils [req-1a403ae4-0725-402f-b5da-9b140e5516cc req-3924a570-6232-40ae-b55c-819794031126 service nova] Acquired lock "refresh_cache-b75490e6-ded3-4aa7-89ff-f4963fe82cfe" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2013.728957] env[62740]: DEBUG nova.network.neutron [req-1a403ae4-0725-402f-b5da-9b140e5516cc req-3924a570-6232-40ae-b55c-819794031126 service nova] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Refreshing network info cache for port c45929f6-3d1a-46ad-8660-fb37509d331c {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2013.729461] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-1a403ae4-0725-402f-b5da-9b140e5516cc req-3924a570-6232-40ae-b55c-819794031126 service nova] Expecting reply to msg a4e5c8b49c6e4928ae087171ec727316 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2013.736089] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a4e5c8b49c6e4928ae087171ec727316 [ 2014.023756] env[62740]: DEBUG nova.network.neutron [req-1a403ae4-0725-402f-b5da-9b140e5516cc req-3924a570-6232-40ae-b55c-819794031126 service nova] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Updated VIF entry in instance network info cache for port c45929f6-3d1a-46ad-8660-fb37509d331c. 
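[annotation] The instance_info_cache entry being refreshed here (logged in full above and again below) is a JSON list of VIFs; the MAC, fixed IP, and NSX switch id that later feed the "Instance VIF info" are read straight out of it. A trimmed, illustrative parse:

    import json

    # Trimmed copy of the cache entry shown in the surrounding records.
    cached_blob = '''[{"id": "c45929f6-3d1a-46ad-8660-fb37509d331c",
      "address": "fa:16:3e:e6:a9:a4",
      "network": {"subnets": [{"ips": [{"address": "192.168.128.4"}]}]},
      "details": {"nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583"}}]'''

    vif = json.loads(cached_blob)[0]
    print(vif['address'])                                     # MAC of the vmxnet3 NIC
    print(vif['network']['subnets'][0]['ips'][0]['address'])  # fixed IP 192.168.128.4
    print(vif['details']['nsx-logical-switch-id'])            # becomes the OpaqueNetwork ref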
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2014.024075] env[62740]: DEBUG nova.network.neutron [req-1a403ae4-0725-402f-b5da-9b140e5516cc req-3924a570-6232-40ae-b55c-819794031126 service nova] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Updating instance_info_cache with network_info: [{"id": "c45929f6-3d1a-46ad-8660-fb37509d331c", "address": "fa:16:3e:e6:a9:a4", "network": {"id": "fe48b87d-d6bf-41e3-8587-388615fdb42f", "bridge": "br-int", "label": "tempest-ServersTestJSON-590567929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d3897fd0522431c87c8830678fd59ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc45929f6-3d", "ovs_interfaceid": "c45929f6-3d1a-46ad-8660-fb37509d331c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2014.024613] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-1a403ae4-0725-402f-b5da-9b140e5516cc req-3924a570-6232-40ae-b55c-819794031126 service nova] Expecting reply to msg 36eeb24569dd42079b8563fbd6ce8ae6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2014.033026] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 36eeb24569dd42079b8563fbd6ce8ae6 [ 2014.033556] env[62740]: DEBUG oslo_concurrency.lockutils [req-1a403ae4-0725-402f-b5da-9b140e5516cc req-3924a570-6232-40ae-b55c-819794031126 service nova] Releasing lock "refresh_cache-b75490e6-ded3-4aa7-89ff-f4963fe82cfe" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2019.797553] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8e64c5c1-7584-49b6-9d25-c0b1516429a3 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 3a49239d6eab45ce9310cd53b41b94d6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2019.810717] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a49239d6eab45ce9310cd53b41b94d6 [ 2019.811282] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8e64c5c1-7584-49b6-9d25-c0b1516429a3 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquiring lock "1b975b29-fbaa-4385-9bf9-33496b4ed129" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2042.891842] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 
2043.886239] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2043.889753] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2043.889950] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Starting heal instance info cache {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 2043.890106] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Rebuilding the list of instances to heal {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 2043.890684] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg f2a83bcaacf945339e186b32d83d1c9f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2043.910909] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f2a83bcaacf945339e186b32d83d1c9f [ 2043.913107] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2043.913262] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2043.913392] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2043.913519] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2043.913642] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2043.913770] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2043.913905] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Skipping network cache update for instance because it is Building. 
{{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2043.914051] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2043.914182] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2043.914303] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2043.914424] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Didn't find any instances for network info cache update. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 2043.914891] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2044.890782] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2044.890946] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] CONF.reclaim_instance_interval <= 0, skipping... 
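[annotation] The stream of "Running periodic task ComputeManager._*" records comes from oslo.service's periodic task machinery: decorated manager methods are invoked on a timer by the service loop. A minimal sketch of that pattern, with an illustrative task body:

    from oslo_config import cfg
    from oslo_service import periodic_task

    class Manager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(cfg.CONF)

        @periodic_task.periodic_task(spacing=60)
        def _heal_instance_info_cache(self, context):
            # The real task rebuilds its instance list and, as logged above,
            # skips instances whose vm_state is still Building.
            pass

    Manager().run_periodic_tasks(None)  # the service loop calls this on a timer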
{{(pid=62740) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 2045.891182] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager.update_available_resource {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2045.891558] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg db4ca832ba6c49b5a91d838419d5c53c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2045.902641] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg db4ca832ba6c49b5a91d838419d5c53c [ 2045.903705] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2045.903943] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2045.904128] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2045.904285] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62740) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2045.905398] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9955aaf7-b67e-4375-9ca4-860e1b6e0408 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.913976] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-941c614d-c559-415c-b102-895f70a6d3bd {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.928909] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3480de89-3c3c-40fb-8794-40424c564e85 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.935133] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e480faf-8e15-4ae1-a353-191d20f0d889 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.963378] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181648MB free_disk=90GB free_vcpus=48 pci_devices=None {{(pid=62740) _report_hypervisor_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2045.963563] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2045.963696] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2045.964542] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg d30e48002bed4197a377b854221f9712 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2046.000130] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d30e48002bed4197a377b854221f9712 [ 2046.004157] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg b635030d0dfe4f6f81eaaab35bfd7775 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2046.013704] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b635030d0dfe4f6f81eaaab35bfd7775 [ 2046.056945] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 3aa2858e-d422-408a-a83a-98382f971add actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2046.057233] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2046.057465] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2046.057687] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 9a595940-16ba-401a-922f-331cf87093c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2046.057909] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance d2fb90b7-1618-4f07-8854-81566887a7cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2046.058153] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 19f27c16-45b3-47d8-acf0-18255844431f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2046.058392] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 4ee71d81-7d8b-42f8-a27c-b4645169fa3e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2046.058603] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2046.058815] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 1b975b29-fbaa-4385-9bf9-33496b4ed129 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2046.059086] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance b75490e6-ded3-4aa7-89ff-f4963fe82cfe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
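[annotation] Each of the ten instances enumerated above holds an identical {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1} allocation (the m1.nano flavor), and together with the 512 MB host reservation they account exactly for the "Final resource view" reported just below:

    # Arithmetic check against the resource tracker's final view below.
    instances = 10                      # allocations enumerated above
    used_ram = 512 + instances * 128    # host reservation + per-instance MEMORY_MB
    used_disk = instances * 1           # DISK_GB per instance
    used_vcpus = instances * 1          # VCPU per instance
    print(used_ram, used_disk, used_vcpus)  # 1792 10 10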
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2046.059418] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2046.059611] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2046.174771] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32b8137e-f4de-4659-8cf0-ab1a88dd1522 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.182667] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6adf484-8a0f-442a-99a6-cd1f9e746520 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.212667] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81584b96-3489-4793-8b49-63ce5220e285 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.219569] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-513b0d85-c6ab-4b42-bd2e-d0cb7ad8eff6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.232444] env[62740]: DEBUG nova.compute.provider_tree [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2046.232898] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 191637a0f0354234ac656ba477023738 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2046.239771] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 191637a0f0354234ac656ba477023738 [ 2046.240633] env[62740]: DEBUG nova.scheduler.client.report [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2046.242802] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 8c042691727246b48a46703ad8eb99de in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2046.253936] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8c042691727246b48a46703ad8eb99de [ 
2046.254580] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62740) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2046.254762] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.291s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2048.254162] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2048.890974] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2049.115843] env[62740]: WARNING oslo_vmware.rw_handles [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2049.115843] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2049.115843] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2049.115843] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2049.115843] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2049.115843] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 2049.115843] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2049.115843] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2049.115843] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2049.115843] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2049.115843] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2049.115843] env[62740]: ERROR oslo_vmware.rw_handles [ 2049.116459] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/9059b745-33a2-46d1-8eb2-8c3987faab33/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2049.118857] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 
3aa2858e-d422-408a-a83a-98382f971add] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2049.119140] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Copying Virtual Disk [datastore2] vmware_temp/9059b745-33a2-46d1-8eb2-8c3987faab33/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore2] vmware_temp/9059b745-33a2-46d1-8eb2-8c3987faab33/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2049.119428] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-86293e44-fd37-432c-b81c-6b45a2337807 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.127392] env[62740]: DEBUG oslo_vmware.api [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Waiting for the task: (returnval){ [ 2049.127392] env[62740]: value = "task-640346" [ 2049.127392] env[62740]: _type = "Task" [ 2049.127392] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2049.134778] env[62740]: DEBUG oslo_vmware.api [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Task: {'id': task-640346, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2049.639272] env[62740]: DEBUG oslo_vmware.exceptions [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Fault InvalidArgument not matched. 
{{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2049.639659] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2049.640165] env[62740]: ERROR nova.compute.manager [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2049.640165] env[62740]: Faults: ['InvalidArgument'] [ 2049.640165] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] Traceback (most recent call last): [ 2049.640165] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2049.640165] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] yield resources [ 2049.640165] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2049.640165] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] self.driver.spawn(context, instance, image_meta, [ 2049.640165] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2049.640165] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2049.640165] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2049.640165] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] self._fetch_image_if_missing(context, vi) [ 2049.640165] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2049.640165] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] image_cache(vi, tmp_image_ds_loc) [ 2049.640165] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2049.640165] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] vm_util.copy_virtual_disk( [ 2049.640165] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2049.640165] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] session._wait_for_task(vmdk_copy_task) [ 2049.640165] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2049.640165] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] return self.wait_for_task(task_ref) [ 2049.640165] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2049.640165] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] return evt.wait() [ 2049.640165] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2049.640165] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] result = hub.switch() [ 2049.640165] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2049.640165] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] return self.greenlet.switch() [ 2049.640165] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2049.640165] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] self.f(*self.args, **self.kw) [ 2049.640165] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2049.640165] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] raise exceptions.translate_fault(task_info.error) [ 2049.640165] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2049.640165] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] Faults: ['InvalidArgument'] [ 2049.640165] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] [ 2049.641066] env[62740]: INFO nova.compute.manager [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Terminating instance [ 2049.642162] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2049.642371] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2049.642611] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-841170f7-54e9-4b57-acb8-e87ae5480a01 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.645828] env[62740]: DEBUG nova.compute.manager [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2049.646038] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2049.646777] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51c5920c-cdc6-4e7f-95c8-d956f46ca746 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.654433] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2049.655152] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cfa91691-34cc-4945-ad32-eb151edf99a1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.656554] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2049.656742] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2049.657429] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef88c250-1a29-487d-95eb-71c16a16e6bf {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.662195] env[62740]: DEBUG oslo_vmware.api [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Waiting for the task: (returnval){ [ 2049.662195] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]525230ca-733e-35ff-8fc8-561447b14c5b" [ 2049.662195] env[62740]: _type = "Task" [ 2049.662195] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2049.669513] env[62740]: DEBUG oslo_vmware.api [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]525230ca-733e-35ff-8fc8-561447b14c5b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2049.721263] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2049.721263] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2049.721263] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Deleting the datastore file [datastore2] 3aa2858e-d422-408a-a83a-98382f971add {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2049.721443] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cf2448a3-d072-4112-8c24-b7943b889b9d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.728750] env[62740]: DEBUG oslo_vmware.api [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Waiting for the task: (returnval){ [ 2049.728750] env[62740]: value = "task-640348" [ 2049.728750] env[62740]: _type = "Task" [ 2049.728750] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2049.736469] env[62740]: DEBUG oslo_vmware.api [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Task: {'id': task-640348, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2050.172312] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2050.172565] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Creating directory with path [datastore2] vmware_temp/82bc99da-b76b-4cde-b405-e891d9fd59bd/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2050.172799] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c1a1308b-e73f-48b6-a2b4-2e0c85904552 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.184299] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Created directory with path [datastore2] vmware_temp/82bc99da-b76b-4cde-b405-e891d9fd59bd/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2050.184480] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Fetch image to [datastore2] vmware_temp/82bc99da-b76b-4cde-b405-e891d9fd59bd/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2050.184651] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/82bc99da-b76b-4cde-b405-e891d9fd59bd/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2050.185391] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d25022b-4ad2-4920-a23e-aafb3be6a131 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.191588] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82597e47-6f0c-431a-9814-a3d8fb0a842d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.200226] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ace061c9-dc72-4bcf-b51b-2f430932a7b4 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.233489] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-fc425a7c-32ea-4828-a57b-98a5597d4bba {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.241606] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-55b46881-e5d6-4c38-884f-d722ce9848ab {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.243182] env[62740]: DEBUG oslo_vmware.api [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Task: {'id': task-640348, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080165} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2050.243411] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2050.243590] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2050.243763] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2050.243935] env[62740]: INFO nova.compute.manager [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Took 0.60 seconds to destroy the instance on the hypervisor. 
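The recurring "A specified parameter was not correct: fileType" traceback above bottoms out in oslo.vmware's task polling: nova invokes CopyVirtualDisk_Task and then blocks in wait_for_task(), which polls TaskInfo and re-raises the server-side fault as a VimFaultException. The following is a minimal Python sketch of that call pattern only, not code from this deployment; the session parameters, datacenter ref, paths, and spec are all placeholders.

from oslo_vmware import api, exceptions

# Placeholder credentials/endpoint, purely illustrative.
session = api.VMwareAPISession(
    'vc.example.test', 'user', 'secret',
    api_retry_count=2, task_poll_interval=0.5)

def copy_disk(source_path, dest_path, dc_ref, copy_spec=None):
    # CopyVirtualDisk_Task is invoked on the VirtualDiskManager, as in the
    # vm_util.copy_virtual_disk frame of the traceback above.
    vdm = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', vdm,
        sourceName=source_path, sourceDatacenter=dc_ref,
        destName=dest_path, destDatacenter=dc_ref,
        destSpec=copy_spec)
    try:
        # Polls the task until it succeeds or errors; on error the
        # translated fault is raised, which is what surfaces here as
        # VimFaultException with Faults: ['InvalidArgument'].
        return session.wait_for_task(task)
    except exceptions.VimFaultException as exc:
        if 'InvalidArgument' in exc.fault_list:
            raise  # e.g. "A specified parameter was not correct: fileType"
        raise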
[ 2050.246087] env[62740]: DEBUG nova.compute.claims [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2050.246274] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2050.246487] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2050.248534] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 3ab652b6800d43f89d004915cb29a66c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2050.266911] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2050.288044] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ab652b6800d43f89d004915cb29a66c [ 2050.321080] env[62740]: DEBUG oslo_vmware.rw_handles [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/82bc99da-b76b-4cde-b405-e891d9fd59bd/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2050.382256] env[62740]: DEBUG oslo_vmware.rw_handles [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Completed reading data from the image iterator. {{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2050.382454] env[62740]: DEBUG oslo_vmware.rw_handles [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/82bc99da-b76b-4cde-b405-e891d9fd59bd/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2050.524103] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5976bc65-0d09-43cf-a18d-be4cb9b2d231 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.531973] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8600896-e8c3-4664-b1db-1f8f75c6f1ea {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.560952] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c5e969f-138f-4637-a5c5-ca99f652fb6f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.567940] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e8dcbb4-9f26-4d51-8a3a-a4b26418649f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.582100] env[62740]: DEBUG nova.compute.provider_tree [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2050.582555] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg fe35a28c5ef347c088de5439b90ae392 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2050.591122] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fe35a28c5ef347c088de5439b90ae392 [ 2050.592032] env[62740]: DEBUG nova.scheduler.client.report [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2050.594266] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 694691d49479442dbfe1d235684d7abe in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2050.606432] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 694691d49479442dbfe1d235684d7abe [ 2050.607133] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.361s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2050.607646] env[62740]: ERROR nova.compute.manager [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2050.607646] env[62740]: Faults: ['InvalidArgument'] [ 2050.607646] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] Traceback (most recent call last): [ 2050.607646] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2050.607646] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] self.driver.spawn(context, instance, image_meta, [ 2050.607646] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2050.607646] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2050.607646] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2050.607646] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] self._fetch_image_if_missing(context, vi) [ 2050.607646] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2050.607646] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] image_cache(vi, tmp_image_ds_loc) [ 2050.607646] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2050.607646] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] vm_util.copy_virtual_disk( [ 2050.607646] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2050.607646] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] session._wait_for_task(vmdk_copy_task) [ 2050.607646] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2050.607646] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] return self.wait_for_task(task_ref) [ 2050.607646] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2050.607646] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] return evt.wait() [ 2050.607646] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2050.607646] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] result = hub.switch() [ 2050.607646] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2050.607646] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] return self.greenlet.switch() [ 2050.607646] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2050.607646] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] self.f(*self.args, **self.kw) [ 2050.607646] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2050.607646] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] raise exceptions.translate_fault(task_info.error) [ 2050.607646] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2050.607646] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] Faults: ['InvalidArgument'] [ 2050.607646] env[62740]: ERROR nova.compute.manager [instance: 3aa2858e-d422-408a-a83a-98382f971add] [ 2050.608414] env[62740]: DEBUG nova.compute.utils [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2050.609822] env[62740]: DEBUG nova.compute.manager [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Build of instance 3aa2858e-d422-408a-a83a-98382f971add was re-scheduled: A specified parameter was not correct: fileType [ 2050.609822] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2050.610218] env[62740]: DEBUG nova.compute.manager [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2050.610391] env[62740]: DEBUG nova.compute.manager [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2050.610558] env[62740]: DEBUG nova.compute.manager [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2050.610733] env[62740]: DEBUG nova.network.neutron [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2050.890301] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2050.890702] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Cleaning up deleted instances {{(pid=62740) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11217}} [ 2050.891183] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg b3b723348ada4550851c479e7dbf65e3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2050.901463] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b3b723348ada4550851c479e7dbf65e3 [ 2050.902047] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] There are 0 instances to clean {{(pid=62740) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11226}} [ 2051.260102] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg ac4f8e0466614535b91f2a3cf29b95d8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2051.273915] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac4f8e0466614535b91f2a3cf29b95d8 [ 2051.274550] env[62740]: DEBUG nova.network.neutron [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2051.275018] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg ac7e98077e094bb7a1d5a47c40ada9cb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2051.291886] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac7e98077e094bb7a1d5a47c40ada9cb [ 2051.292518] env[62740]: INFO nova.compute.manager [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Took 0.68 seconds to deallocate network for instance. 
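Much of the surrounding DEBUG chatter ("Acquiring lock ...", "acquired ... waited 0.000s", '"released" ... held 0.361s') is oslo.concurrency's lock instrumentation, emitted from the wrapper that lockutils installs around the locked section. A minimal sketch of the two usages visible in this log follows; the function bodies are stubs and the names mirror the log only for illustration.

from oslo_concurrency import lockutils

# In-process mutual exclusion, the style behind the "compute_resources"
# lock lines: the decorator logs who waited on and held the named lock.
@lockutils.synchronized('compute_resources')
def abort_instance_claim():
    pass  # resource-tracker work would happen here

# Cross-process (file-based) locking, the style behind the
# '[datastore2] devstack-image-cache_base/...' acquire/release lines.
def cache_image(ds_path):
    with lockutils.lock(ds_path, lock_file_prefix='nova-', external=True):
        pass  # fetch/cache the image while holding the external lock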
[ 2051.294402] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 82e9b6d3c5de4a479fa0075e4c68aae2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2051.326514] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 82e9b6d3c5de4a479fa0075e4c68aae2 [ 2051.330447] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 7e532308ab664ac880e1614d6e93ef35 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2051.360031] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7e532308ab664ac880e1614d6e93ef35 [ 2051.383932] env[62740]: INFO nova.scheduler.client.report [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Deleted allocations for instance 3aa2858e-d422-408a-a83a-98382f971add [ 2051.390174] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg be7928e93fde4e98b762a450002b9336 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2051.408225] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be7928e93fde4e98b762a450002b9336 [ 2051.408899] env[62740]: DEBUG oslo_concurrency.lockutils [None req-3af18e6e-bfd4-4ec5-9047-eb0f5873d103 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "3aa2858e-d422-408a-a83a-98382f971add" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 621.777s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2051.409149] env[62740]: DEBUG oslo_concurrency.lockutils [None req-b831c5d2-2702-484d-898c-322fb0333791 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "3aa2858e-d422-408a-a83a-98382f971add" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 425.310s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2051.409375] env[62740]: DEBUG oslo_concurrency.lockutils [None req-b831c5d2-2702-484d-898c-322fb0333791 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquiring lock "3aa2858e-d422-408a-a83a-98382f971add-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2051.409585] env[62740]: DEBUG oslo_concurrency.lockutils [None req-b831c5d2-2702-484d-898c-322fb0333791 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "3aa2858e-d422-408a-a83a-98382f971add-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
2051.410904] env[62740]: DEBUG oslo_concurrency.lockutils [None req-b831c5d2-2702-484d-898c-322fb0333791 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "3aa2858e-d422-408a-a83a-98382f971add-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2051.411864] env[62740]: INFO nova.compute.manager [None req-b831c5d2-2702-484d-898c-322fb0333791 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Terminating instance [ 2051.413709] env[62740]: DEBUG nova.compute.manager [None req-b831c5d2-2702-484d-898c-322fb0333791 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2051.414036] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-b831c5d2-2702-484d-898c-322fb0333791 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2051.414446] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5b109781-519c-491f-8c3a-f54a4895aaa3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.423994] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-293e8223-07f2-45ad-9e74-901ab2f97329 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.451221] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-b831c5d2-2702-484d-898c-322fb0333791 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3aa2858e-d422-408a-a83a-98382f971add could not be found. [ 2051.451446] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-b831c5d2-2702-484d-898c-322fb0333791 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2051.451629] env[62740]: INFO nova.compute.manager [None req-b831c5d2-2702-484d-898c-322fb0333791 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2051.451877] env[62740]: DEBUG oslo.service.loopingcall [None req-b831c5d2-2702-484d-898c-322fb0333791 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2051.452110] env[62740]: DEBUG nova.compute.manager [-] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2051.452208] env[62740]: DEBUG nova.network.neutron [-] [instance: 3aa2858e-d422-408a-a83a-98382f971add] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2051.478011] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e98d7855c72e4c8a9567cdc5a7cd5cfe in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2051.488519] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e98d7855c72e4c8a9567cdc5a7cd5cfe [ 2051.488902] env[62740]: DEBUG nova.network.neutron [-] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2051.489304] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg a0b6a244e99c4801b7e728b15aea142b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2051.496418] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a0b6a244e99c4801b7e728b15aea142b [ 2051.496871] env[62740]: INFO nova.compute.manager [-] [instance: 3aa2858e-d422-408a-a83a-98382f971add] Took 0.04 seconds to deallocate network for instance. [ 2051.500309] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-b831c5d2-2702-484d-898c-322fb0333791 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 3373fcd6ac2e4231a790c075df08466e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2051.526046] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3373fcd6ac2e4231a790c075df08466e [ 2051.541278] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-b831c5d2-2702-484d-898c-322fb0333791 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg e83a9a8d508a4d70b9fe431c87a8d3ba in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2051.586718] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e83a9a8d508a4d70b9fe431c87a8d3ba [ 2051.589638] env[62740]: DEBUG oslo_concurrency.lockutils [None req-b831c5d2-2702-484d-898c-322fb0333791 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "3aa2858e-d422-408a-a83a-98382f971add" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.180s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2051.590118] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-b831c5d2-2702-484d-898c-322fb0333791 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 874a48eafbdb4c4ba4c010ed56e41ab7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2051.590922] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "3aa2858e-d422-408a-a83a-98382f971add" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: 
waited 242.342s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2051.591110] env[62740]: INFO nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 3aa2858e-d422-408a-a83a-98382f971add] During sync_power_state the instance has a pending task (deleting). Skip. [ 2051.591290] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "3aa2858e-d422-408a-a83a-98382f971add" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2051.600786] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 874a48eafbdb4c4ba4c010ed56e41ab7 [ 2051.890363] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2051.890740] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2051.890781] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Cleaning up deleted instances with incomplete migration {{(pid=62740) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11255}} [ 2051.891094] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 47228cc6f9974dd5acbc842f1b8bf47c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2051.898523] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 47228cc6f9974dd5acbc842f1b8bf47c [ 2062.891048] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2062.891474] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 2a9caa26081e47e0b6691d8f2b7ca583 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2062.898266] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a9caa26081e47e0b6691d8f2b7ca583 [ 2063.226363] env[62740]: DEBUG oslo_concurrency.lockutils [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Acquiring lock "22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2063.226600] env[62740]: DEBUG oslo_concurrency.lockutils [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Lock "22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2063.227112] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Expecting reply to msg f1c12d82cba44158a468c51be77f4ce4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2063.236689] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f1c12d82cba44158a468c51be77f4ce4 [ 2063.237131] env[62740]: DEBUG nova.compute.manager [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2063.238689] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Expecting reply to msg 89ef9868016d4826bd49b244d8b592eb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2063.267791] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 89ef9868016d4826bd49b244d8b592eb [ 2063.283123] env[62740]: DEBUG oslo_concurrency.lockutils [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2063.283367] env[62740]: DEBUG oslo_concurrency.lockutils [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2063.284766] env[62740]: INFO nova.compute.claims [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2063.286290] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Expecting reply to msg d0292e4fd3064048a25ad693f8d93e40 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2063.322447] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d0292e4fd3064048a25ad693f8d93e40 [ 2063.324476] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Expecting reply to msg 238b1913c7a64e0abcf0c6a84c076056 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2063.331455] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 238b1913c7a64e0abcf0c6a84c076056 [ 2063.447198] env[62740]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffb64428-3ac5-4953-be8b-dc07b88b0768 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.455101] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0786eeb5-c658-4606-808e-cd63955e4d46 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.483991] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-449a166e-1b5e-48c6-aac2-513f8d9c79ce {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.491283] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d3abe45-50ef-437c-a884-676aafc22e8c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.505296] env[62740]: DEBUG nova.compute.provider_tree [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2063.505780] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Expecting reply to msg 488dd055bcbe41888cd8fcdff5073097 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2063.513548] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 488dd055bcbe41888cd8fcdff5073097 [ 2063.514443] env[62740]: DEBUG nova.scheduler.client.report [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2063.516933] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Expecting reply to msg 41c187e50089464a808bdd31d500e519 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2063.532230] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 41c187e50089464a808bdd31d500e519 [ 2063.533197] env[62740]: DEBUG oslo_concurrency.lockutils [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.250s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2063.533815] env[62740]: DEBUG nova.compute.manager [None 
req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Start building networks asynchronously for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2063.535905] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Expecting reply to msg fd6dd902b0de479c8e01381c8f647411 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2063.582651] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fd6dd902b0de479c8e01381c8f647411 [ 2063.584810] env[62740]: DEBUG nova.compute.utils [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2063.585259] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Expecting reply to msg fa831116179941c5abdefb1376883f96 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2063.586103] env[62740]: DEBUG nova.compute.manager [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Not allocating networking since 'none' was specified. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 2063.595285] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fa831116179941c5abdefb1376883f96 [ 2063.595839] env[62740]: DEBUG nova.compute.manager [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Start building block device mappings for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2063.597649] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Expecting reply to msg 4afa736a509043a99f187ce53f32f363 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2063.627417] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4afa736a509043a99f187ce53f32f363 [ 2063.631020] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Expecting reply to msg 2be3611e54c6484d84301f836d5fe005 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2063.659459] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2be3611e54c6484d84301f836d5fe005 [ 2063.660572] env[62740]: DEBUG nova.compute.manager [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Start spawning the instance on the hypervisor. 
{{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2063.684222] env[62740]: DEBUG nova.virt.hardware [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2063.684464] env[62740]: DEBUG nova.virt.hardware [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2063.684624] env[62740]: DEBUG nova.virt.hardware [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2063.684809] env[62740]: DEBUG nova.virt.hardware [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2063.684956] env[62740]: DEBUG nova.virt.hardware [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2063.685122] env[62740]: DEBUG nova.virt.hardware [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2063.685326] env[62740]: DEBUG nova.virt.hardware [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2063.685488] env[62740]: DEBUG nova.virt.hardware [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2063.685651] env[62740]: DEBUG nova.virt.hardware [None req-331e0194-dcd9-4434-894e-9114edb9e588 
tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2063.685811] env[62740]: DEBUG nova.virt.hardware [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2063.685983] env[62740]: DEBUG nova.virt.hardware [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2063.686845] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77d34c78-4c5c-4341-9f94-6897d23e42b2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.694331] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd2b59fb-8baa-4255-a6b5-1aa61209fa87 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.707466] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Instance VIF info [] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2063.712956] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Creating folder: Project (1d8acec5fffc4964b98c3ffd2bf97f82). Parent ref: group-v156037. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2063.713212] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c57d1abf-a708-4f9d-8ecd-ac89bfa277b8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.722654] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Created folder: Project (1d8acec5fffc4964b98c3ffd2bf97f82) in parent group-v156037. [ 2063.722832] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Creating folder: Instances. Parent ref: group-v156183. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2063.723065] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ec23d86d-b3e2-49f2-a358-e0c128ac29e7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.731428] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Created folder: Instances in parent group-v156183. 
[ 2063.731643] env[62740]: DEBUG oslo.service.loopingcall [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2063.731838] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2063.732040] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2299f25e-efa4-4416-9b68-d1f6e768679c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.748053] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2063.748053] env[62740]: value = "task-640351" [ 2063.748053] env[62740]: _type = "Task" [ 2063.748053] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2063.754763] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640351, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2064.257731] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640351, 'name': CreateVM_Task, 'duration_secs': 0.264146} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2064.258064] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2064.258282] env[62740]: DEBUG oslo_concurrency.lockutils [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2064.258443] env[62740]: DEBUG oslo_concurrency.lockutils [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2064.258793] env[62740]: DEBUG oslo_concurrency.lockutils [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2064.259028] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c9f5bcd-c433-4bee-8b71-b31885121150 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.263210] env[62740]: DEBUG oslo_vmware.api [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Waiting for the task: 
(returnval){ [ 2064.263210] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5226fbaa-f701-2882-976a-67a9be4e2820" [ 2064.263210] env[62740]: _type = "Task" [ 2064.263210] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2064.270335] env[62740]: DEBUG oslo_vmware.api [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5226fbaa-f701-2882-976a-67a9be4e2820, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2064.777887] env[62740]: DEBUG oslo_concurrency.lockutils [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2064.778167] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2064.778381] env[62740]: DEBUG oslo_concurrency.lockutils [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2090.202259] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 4b0389ff2c1442bebf47d07266c2760d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2090.211469] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4b0389ff2c1442bebf47d07266c2760d [ 2096.986275] env[62740]: WARNING oslo_vmware.rw_handles [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2096.986275] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2096.986275] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2096.986275] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2096.986275] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2096.986275] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 2096.986275] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2096.986275] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2096.986275] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2096.986275] env[62740]: ERROR oslo_vmware.rw_handles 
raise RemoteDisconnected("Remote end closed connection without" [ 2096.986275] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2096.986275] env[62740]: ERROR oslo_vmware.rw_handles [ 2096.986948] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/82bc99da-b76b-4cde-b405-e891d9fd59bd/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2096.988922] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2096.989190] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Copying Virtual Disk [datastore2] vmware_temp/82bc99da-b76b-4cde-b405-e891d9fd59bd/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore2] vmware_temp/82bc99da-b76b-4cde-b405-e891d9fd59bd/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2096.989477] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4e43df36-99c4-4f98-9208-00fb347e4508 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.997610] env[62740]: DEBUG oslo_vmware.api [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Waiting for the task: (returnval){ [ 2096.997610] env[62740]: value = "task-640352" [ 2096.997610] env[62740]: _type = "Task" [ 2096.997610] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2097.005403] env[62740]: DEBUG oslo_vmware.api [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Task: {'id': task-640352, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2097.508273] env[62740]: DEBUG oslo_vmware.exceptions [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Fault InvalidArgument not matched. 
{{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2097.508569] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2097.509171] env[62740]: ERROR nova.compute.manager [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2097.509171] env[62740]: Faults: ['InvalidArgument'] [ 2097.509171] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Traceback (most recent call last): [ 2097.509171] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2097.509171] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] yield resources [ 2097.509171] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2097.509171] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] self.driver.spawn(context, instance, image_meta, [ 2097.509171] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2097.509171] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2097.509171] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2097.509171] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] self._fetch_image_if_missing(context, vi) [ 2097.509171] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2097.509171] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] image_cache(vi, tmp_image_ds_loc) [ 2097.509171] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2097.509171] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] vm_util.copy_virtual_disk( [ 2097.509171] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2097.509171] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] session._wait_for_task(vmdk_copy_task) [ 2097.509171] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2097.509171] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] return self.wait_for_task(task_ref) [ 2097.509171] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2097.509171] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] return evt.wait() [ 2097.509171] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2097.509171] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] result = hub.switch() [ 2097.509171] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2097.509171] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] return self.greenlet.switch() [ 2097.509171] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2097.509171] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] self.f(*self.args, **self.kw) [ 2097.509171] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2097.509171] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] raise exceptions.translate_fault(task_info.error) [ 2097.509171] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2097.509171] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Faults: ['InvalidArgument'] [ 2097.509171] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] [ 2097.510117] env[62740]: INFO nova.compute.manager [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Terminating instance [ 2097.511075] env[62740]: DEBUG oslo_concurrency.lockutils [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2097.511321] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2097.511567] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-352eee47-54c9-4257-87d1-8eb013c5d24a 
{{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.513886] env[62740]: DEBUG nova.compute.manager [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2097.514094] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2097.514803] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a759c24f-f50c-41b4-9a6c-ad7984c39839 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.522280] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2097.522552] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ee8f3cc3-c912-40e8-9981-4b551ad4ff02 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.525354] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2097.525601] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2097.526906] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d96db3f-f958-4588-a9ab-a277bcc01ee5 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.532858] env[62740]: DEBUG oslo_vmware.api [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Waiting for the task: (returnval){ [ 2097.532858] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5279e6f1-0088-e86f-477a-fce75bbccfb5" [ 2097.532858] env[62740]: _type = "Task" [ 2097.532858] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2097.542731] env[62740]: DEBUG oslo_vmware.api [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5279e6f1-0088-e86f-477a-fce75bbccfb5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2097.595879] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2097.596126] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2097.596315] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Deleting the datastore file [datastore2] c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10 {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2097.596594] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7727ecf5-186f-4d84-9475-ae96e6471800 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.603412] env[62740]: DEBUG oslo_vmware.api [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Waiting for the task: (returnval){ [ 2097.603412] env[62740]: value = "task-640354" [ 2097.603412] env[62740]: _type = "Task" [ 2097.603412] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2097.611110] env[62740]: DEBUG oslo_vmware.api [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Task: {'id': task-640354, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2098.043225] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2098.043596] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Creating directory with path [datastore2] vmware_temp/59813bcc-42a0-411f-8a32-f6799a78f48f/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2098.043715] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-19e76853-08d7-443d-beb7-4212057886b2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.054702] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Created directory with path [datastore2] vmware_temp/59813bcc-42a0-411f-8a32-f6799a78f48f/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2098.054914] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Fetch image to [datastore2] vmware_temp/59813bcc-42a0-411f-8a32-f6799a78f48f/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2098.055070] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/59813bcc-42a0-411f-8a32-f6799a78f48f/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2098.055795] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e79e23b-ede3-4060-a5de-b3401abd0855 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.062567] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f34e744-ea3d-4214-a52c-18d6a8490665 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.071416] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1811cebc-67fb-4597-8373-b91f414bbcf4 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.102008] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57107cfe-816e-4b2c-8f16-3ad2575e524f 
{{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.112672] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-cdeabb93-3418-4a0e-8250-1d699bbbc8de {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.114343] env[62740]: DEBUG oslo_vmware.api [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Task: {'id': task-640354, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075137} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2098.114581] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2098.114764] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2098.114959] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2098.115169] env[62740]: INFO nova.compute.manager [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2098.117291] env[62740]: DEBUG nova.compute.claims [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2098.117468] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2098.117681] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2098.119730] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 928c695ba832413184c7b69924ff7e48 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2098.135131] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2098.171898] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 928c695ba832413184c7b69924ff7e48 [ 2098.268876] env[62740]: DEBUG oslo_vmware.rw_handles [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/59813bcc-42a0-411f-8a32-f6799a78f48f/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2098.331106] env[62740]: DEBUG oslo_vmware.rw_handles [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Completed reading data from the image iterator. {{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2098.331331] env[62740]: DEBUG oslo_vmware.rw_handles [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/59813bcc-42a0-411f-8a32-f6799a78f48f/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2098.352294] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0cafa1d-acf7-4301-bfd7-10fa3191701b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.359835] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7c6c375-c30d-4da2-9e54-7f91217b053a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.388132] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca92d847-0f10-4ace-bd9a-2c5fec77acf6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.394567] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4a8e95e-8a3f-45fe-8497-d83cb961a440 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.407724] env[62740]: DEBUG nova.compute.provider_tree [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2098.408211] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 60fa3253f49348369ab052bab0952a29 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2098.415218] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 60fa3253f49348369ab052bab0952a29 [ 2098.416081] env[62740]: DEBUG nova.scheduler.client.report [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2098.418324] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg d5c7756ea17d429aa5c2989fb240692c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2098.429922] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d5c7756ea17d429aa5c2989fb240692c [ 2098.430622] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.313s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2098.431163] env[62740]: ERROR nova.compute.manager [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2098.431163] env[62740]: Faults: ['InvalidArgument'] [ 2098.431163] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Traceback (most recent call last): [ 2098.431163] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2098.431163] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] self.driver.spawn(context, instance, image_meta, [ 2098.431163] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2098.431163] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2098.431163] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2098.431163] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] self._fetch_image_if_missing(context, vi) [ 2098.431163] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2098.431163] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] image_cache(vi, tmp_image_ds_loc) [ 2098.431163] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2098.431163] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] vm_util.copy_virtual_disk( [ 2098.431163] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2098.431163] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] session._wait_for_task(vmdk_copy_task) [ 2098.431163] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2098.431163] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] return self.wait_for_task(task_ref) [ 2098.431163] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2098.431163] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] return evt.wait() [ 2098.431163] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2098.431163] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] result = hub.switch() [ 2098.431163] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2098.431163] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] return self.greenlet.switch() [ 2098.431163] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2098.431163] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] self.f(*self.args, **self.kw) [ 2098.431163] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2098.431163] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] raise exceptions.translate_fault(task_info.error) [ 2098.431163] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2098.431163] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Faults: ['InvalidArgument'] [ 2098.431163] env[62740]: ERROR nova.compute.manager [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] [ 2098.431917] env[62740]: DEBUG nova.compute.utils [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2098.433267] env[62740]: DEBUG nova.compute.manager [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Build of instance c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10 was re-scheduled: A specified parameter was not correct: fileType [ 2098.433267] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2098.433633] env[62740]: DEBUG nova.compute.manager [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2098.433805] env[62740]: DEBUG nova.compute.manager [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2098.433977] env[62740]: DEBUG nova.compute.manager [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2098.434160] env[62740]: DEBUG nova.network.neutron [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2098.837082] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 0c53b386ae7745e0a6ba44cf47738aee in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2098.848621] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c53b386ae7745e0a6ba44cf47738aee [ 2098.848745] env[62740]: DEBUG nova.network.neutron [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2098.849518] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 9e181e5f094b42be8c50d23e51f670ce in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2098.858606] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9e181e5f094b42be8c50d23e51f670ce [ 2098.859414] env[62740]: INFO nova.compute.manager [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Took 0.43 seconds to deallocate network for instance. 
[ 2098.861187] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg cb2070f3ba8147cdb643effc7a2c1bd0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2098.897748] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cb2070f3ba8147cdb643effc7a2c1bd0 [ 2098.901345] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg a5fcf20b25984e7a87eafd72dc8ed2de in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2098.936636] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a5fcf20b25984e7a87eafd72dc8ed2de [ 2098.970465] env[62740]: INFO nova.scheduler.client.report [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Deleted allocations for instance c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10 [ 2098.976667] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg 5fb228735689463e945cb4f166fae5c2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2098.993759] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5fb228735689463e945cb4f166fae5c2 [ 2098.995362] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e16501e9-c88e-4dd7-afd3-f369bf61b2c5 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Lock "c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 530.022s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2098.995362] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c15ba887-c356-4406-b909-50dd3bb8c551 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Lock "c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 334.167s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2098.995362] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c15ba887-c356-4406-b909-50dd3bb8c551 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Acquiring lock "c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2098.995845] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c15ba887-c356-4406-b909-50dd3bb8c551 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Lock "c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
2098.995845] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c15ba887-c356-4406-b909-50dd3bb8c551 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Lock "c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2098.999155] env[62740]: INFO nova.compute.manager [None req-c15ba887-c356-4406-b909-50dd3bb8c551 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Terminating instance [ 2099.000738] env[62740]: DEBUG nova.compute.manager [None req-c15ba887-c356-4406-b909-50dd3bb8c551 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2099.000738] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-c15ba887-c356-4406-b909-50dd3bb8c551 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2099.000944] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-929759af-3adf-45c3-b630-10a88d6523e1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.012338] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23194b9a-c717-493c-992d-06639dae0540 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.042092] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-c15ba887-c356-4406-b909-50dd3bb8c551 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10 could not be found. [ 2099.042343] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-c15ba887-c356-4406-b909-50dd3bb8c551 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2099.042554] env[62740]: INFO nova.compute.manager [None req-c15ba887-c356-4406-b909-50dd3bb8c551 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2099.042804] env[62740]: DEBUG oslo.service.loopingcall [None req-c15ba887-c356-4406-b909-50dd3bb8c551 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2099.043046] env[62740]: DEBUG nova.compute.manager [-] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2099.043157] env[62740]: DEBUG nova.network.neutron [-] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2099.059421] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 5c2b74670c3e46adbdb8f8a349b4e005 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2099.066946] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c2b74670c3e46adbdb8f8a349b4e005 [ 2099.067319] env[62740]: DEBUG nova.network.neutron [-] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2099.067688] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 28835b121df5473580c86722d2583583 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2099.075517] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 28835b121df5473580c86722d2583583 [ 2099.075965] env[62740]: INFO nova.compute.manager [-] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] Took 0.03 seconds to deallocate network for instance. [ 2099.081696] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c15ba887-c356-4406-b909-50dd3bb8c551 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg c4de542bb6124dbb889e4460dd2b0985 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2099.108604] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c4de542bb6124dbb889e4460dd2b0985 [ 2099.122725] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c15ba887-c356-4406-b909-50dd3bb8c551 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg d21e7d91e39545f6bc5e74a08c8f37c3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2099.163937] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d21e7d91e39545f6bc5e74a08c8f37c3 [ 2099.166714] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c15ba887-c356-4406-b909-50dd3bb8c551 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Lock "c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.172s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2099.167062] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c15ba887-c356-4406-b909-50dd3bb8c551 tempest-AttachVolumeNegativeTest-1769210663 tempest-AttachVolumeNegativeTest-1769210663-project-member] Expecting reply to msg e5fd9f201ad84bebb5eb3633745c7580 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2099.167716] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: 
waited 289.919s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2099.167909] env[62740]: INFO nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10] During sync_power_state the instance has a pending task (deleting). Skip. [ 2099.168096] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "c34c1c1c-c1c6-42f0-b3f1-a311cb2d7c10" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2099.176712] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e5fd9f201ad84bebb5eb3633745c7580 [ 2103.192637] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquiring lock "0f438d9b-394a-465c-97ae-8393bdc3e1cd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2103.192947] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "0f438d9b-394a-465c-97ae-8393bdc3e1cd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2103.193419] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 180d5e6840e848978a4c319494dadcaa in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2103.204281] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 180d5e6840e848978a4c319494dadcaa [ 2103.204738] env[62740]: DEBUG nova.compute.manager [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 0f438d9b-394a-465c-97ae-8393bdc3e1cd] Starting instance... 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2103.206359] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 46dc2238413b4804913e25e127a846a4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2103.237416] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 46dc2238413b4804913e25e127a846a4 [ 2103.255547] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2103.255936] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2103.258033] env[62740]: INFO nova.compute.claims [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 0f438d9b-394a-465c-97ae-8393bdc3e1cd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2103.259512] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg c78da98c98124bf9b506f5cb105652ab in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2103.293184] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c78da98c98124bf9b506f5cb105652ab [ 2103.295100] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 9635898bfe0a411ebc593fa695461d33 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2103.303207] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9635898bfe0a411ebc593fa695461d33 [ 2103.318708] env[62740]: DEBUG nova.scheduler.client.report [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Refreshing inventories for resource provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 2103.331735] env[62740]: DEBUG nova.scheduler.client.report [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Updating ProviderTree inventory for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 2103.331955] env[62740]: DEBUG nova.compute.provider_tree [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Updating inventory in ProviderTree for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2103.342851] env[62740]: DEBUG nova.scheduler.client.report [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Refreshing aggregate associations for resource provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0, aggregates: None {{(pid=62740) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 2103.367131] env[62740]: DEBUG nova.scheduler.client.report [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Refreshing trait associations for resource provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62740) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 2103.482236] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4f4850f-e708-4ac1-b043-031a4709f262 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.490100] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4871c82b-5db5-4a3e-b076-ed7872bf5c97 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.518783] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-353ae6ce-7099-4900-b245-375563bf247d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.526141] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-301e71f3-6ae7-453a-b12f-b60dadaa1b38 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.539320] env[62740]: DEBUG nova.compute.provider_tree [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2103.539858] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting 
reply to msg 87d8f34537dd4ca4bdc4217879c8d681 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2103.547979] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 87d8f34537dd4ca4bdc4217879c8d681 [ 2103.548989] env[62740]: DEBUG nova.scheduler.client.report [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2103.551362] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg c72ba6f9d3014d899d102c6b99fba734 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2103.563469] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c72ba6f9d3014d899d102c6b99fba734 [ 2103.564226] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.308s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2103.564698] env[62740]: DEBUG nova.compute.manager [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 0f438d9b-394a-465c-97ae-8393bdc3e1cd] Start building networks asynchronously for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2103.566922] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 3dfd22fdd90b41308748130a93eba968 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2103.596364] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3dfd22fdd90b41308748130a93eba968 [ 2103.598143] env[62740]: DEBUG nova.compute.utils [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2103.598745] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 7bb8e9b6b47b479b8e1968c01b102fea in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2103.599770] env[62740]: DEBUG nova.compute.manager [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 0f438d9b-394a-465c-97ae-8393bdc3e1cd] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2103.599940] env[62740]: DEBUG nova.network.neutron [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 0f438d9b-394a-465c-97ae-8393bdc3e1cd] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2103.608452] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7bb8e9b6b47b479b8e1968c01b102fea [ 2103.609079] env[62740]: DEBUG nova.compute.manager [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 0f438d9b-394a-465c-97ae-8393bdc3e1cd] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2103.610663] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 86f321ffe2574fef9fc92b4520f04807 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2103.639643] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 86f321ffe2574fef9fc92b4520f04807 [ 2103.642188] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg f4560a30c63f424c99146c08e6b62804 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2103.670514] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f4560a30c63f424c99146c08e6b62804 [ 2103.671464] env[62740]: DEBUG nova.compute.manager [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 0f438d9b-394a-465c-97ae-8393bdc3e1cd] Start spawning the instance on the hypervisor. {{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2103.682636] env[62740]: DEBUG nova.policy [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fa549a18cbf84678844e14ddd094d70e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '065d149aea7645d7a5e32c0d14ff0936', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 2103.696683] env[62740]: DEBUG nova.virt.hardware [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2103.696923] env[62740]: DEBUG nova.virt.hardware [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2103.697096] env[62740]: DEBUG nova.virt.hardware [None 
req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2103.697289] env[62740]: DEBUG nova.virt.hardware [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2103.697439] env[62740]: DEBUG nova.virt.hardware [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2103.697586] env[62740]: DEBUG nova.virt.hardware [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2103.697792] env[62740]: DEBUG nova.virt.hardware [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2103.697955] env[62740]: DEBUG nova.virt.hardware [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2103.698136] env[62740]: DEBUG nova.virt.hardware [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2103.698298] env[62740]: DEBUG nova.virt.hardware [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2103.698468] env[62740]: DEBUG nova.virt.hardware [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2103.699368] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0b9071d-1dba-474d-b26c-6079e1fc32e5 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.707083] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3882bc10-8073-4208-b504-b5b2cbc64b3c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.989670] env[62740]: 
DEBUG nova.network.neutron [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 0f438d9b-394a-465c-97ae-8393bdc3e1cd] Successfully created port: 0b516ead-5b1e-4e78-a0f9-e4643bfd41e0 {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2104.545012] env[62740]: DEBUG nova.compute.manager [req-81880a13-bbf8-4e98-ab5a-efc9882e5dfa req-66e374a8-660f-472b-9b93-2c51c5a4d410 service nova] [instance: 0f438d9b-394a-465c-97ae-8393bdc3e1cd] Received event network-vif-plugged-0b516ead-5b1e-4e78-a0f9-e4643bfd41e0 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 2104.545366] env[62740]: DEBUG oslo_concurrency.lockutils [req-81880a13-bbf8-4e98-ab5a-efc9882e5dfa req-66e374a8-660f-472b-9b93-2c51c5a4d410 service nova] Acquiring lock "0f438d9b-394a-465c-97ae-8393bdc3e1cd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2104.545621] env[62740]: DEBUG oslo_concurrency.lockutils [req-81880a13-bbf8-4e98-ab5a-efc9882e5dfa req-66e374a8-660f-472b-9b93-2c51c5a4d410 service nova] Lock "0f438d9b-394a-465c-97ae-8393bdc3e1cd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2104.545888] env[62740]: DEBUG oslo_concurrency.lockutils [req-81880a13-bbf8-4e98-ab5a-efc9882e5dfa req-66e374a8-660f-472b-9b93-2c51c5a4d410 service nova] Lock "0f438d9b-394a-465c-97ae-8393bdc3e1cd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2104.546230] env[62740]: DEBUG nova.compute.manager [req-81880a13-bbf8-4e98-ab5a-efc9882e5dfa req-66e374a8-660f-472b-9b93-2c51c5a4d410 service nova] [instance: 0f438d9b-394a-465c-97ae-8393bdc3e1cd] No waiting events found dispatching network-vif-plugged-0b516ead-5b1e-4e78-a0f9-e4643bfd41e0 {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2104.546568] env[62740]: WARNING nova.compute.manager [req-81880a13-bbf8-4e98-ab5a-efc9882e5dfa req-66e374a8-660f-472b-9b93-2c51c5a4d410 service nova] [instance: 0f438d9b-394a-465c-97ae-8393bdc3e1cd] Received unexpected event network-vif-plugged-0b516ead-5b1e-4e78-a0f9-e4643bfd41e0 for instance with vm_state building and task_state spawning. 
[ 2104.636265] env[62740]: DEBUG nova.network.neutron [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 0f438d9b-394a-465c-97ae-8393bdc3e1cd] Successfully updated port: 0b516ead-5b1e-4e78-a0f9-e4643bfd41e0 {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2104.636764] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg b614a4a3b46449c092361d9864d8890e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2104.646808] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b614a4a3b46449c092361d9864d8890e [ 2104.647522] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquiring lock "refresh_cache-0f438d9b-394a-465c-97ae-8393bdc3e1cd" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2104.647992] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquired lock "refresh_cache-0f438d9b-394a-465c-97ae-8393bdc3e1cd" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2104.647992] env[62740]: DEBUG nova.network.neutron [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 0f438d9b-394a-465c-97ae-8393bdc3e1cd] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2104.648574] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg a5588aa1a85f475d8e3d9fa3803fbd35 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2104.657072] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a5588aa1a85f475d8e3d9fa3803fbd35 [ 2104.685910] env[62740]: DEBUG nova.network.neutron [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 0f438d9b-394a-465c-97ae-8393bdc3e1cd] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2104.850225] env[62740]: DEBUG nova.network.neutron [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 0f438d9b-394a-465c-97ae-8393bdc3e1cd] Updating instance_info_cache with network_info: [{"id": "0b516ead-5b1e-4e78-a0f9-e4643bfd41e0", "address": "fa:16:3e:fa:36:03", "network": {"id": "a1bf429f-63e1-4b06-ba31-36e8e686268d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1763096855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "065d149aea7645d7a5e32c0d14ff0936", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b516ead-5b", "ovs_interfaceid": "0b516ead-5b1e-4e78-a0f9-e4643bfd41e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2104.850750] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 3f7354ee26244fd1867e62f00fb6abc0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2104.861189] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f7354ee26244fd1867e62f00fb6abc0 [ 2104.861767] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Releasing lock "refresh_cache-0f438d9b-394a-465c-97ae-8393bdc3e1cd" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2104.862068] env[62740]: DEBUG nova.compute.manager [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 0f438d9b-394a-465c-97ae-8393bdc3e1cd] Instance network_info: |[{"id": "0b516ead-5b1e-4e78-a0f9-e4643bfd41e0", "address": "fa:16:3e:fa:36:03", "network": {"id": "a1bf429f-63e1-4b06-ba31-36e8e686268d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1763096855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "065d149aea7645d7a5e32c0d14ff0936", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b516ead-5b", "ovs_interfaceid": "0b516ead-5b1e-4e78-a0f9-e4643bfd41e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2104.862464] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 0f438d9b-394a-465c-97ae-8393bdc3e1cd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fa:36:03', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b399c74-1411-408a-b4cd-84e268ae83fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0b516ead-5b1e-4e78-a0f9-e4643bfd41e0', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2104.870047] env[62740]: DEBUG oslo.service.loopingcall [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2104.870354] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f438d9b-394a-465c-97ae-8393bdc3e1cd] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2104.870613] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-71dd774a-3a29-4a05-91ce-f053e2d52bde {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.890986] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2104.890986] env[62740]: value = "task-640355" [ 2104.890986] env[62740]: _type = "Task" [ 2104.890986] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2104.894233] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2104.894432] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2104.894959] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2104.895176] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2104.895293] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62740) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 2104.899279] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640355, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2105.401817] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640355, 'name': CreateVM_Task, 'duration_secs': 0.285219} completed successfully. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2105.401993] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f438d9b-394a-465c-97ae-8393bdc3e1cd] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2105.402711] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2105.402832] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2105.404051] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2105.404051] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89c5376f-6aec-4404-8774-f82abc7ea0f5 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.407566] env[62740]: DEBUG oslo_vmware.api [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Waiting for the task: (returnval){ [ 2105.407566] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]524df73f-0bed-21c7-4bfd-5cedb4044c6d" [ 2105.407566] env[62740]: _type = "Task" [ 2105.407566] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2105.414761] env[62740]: DEBUG oslo_vmware.api [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]524df73f-0bed-21c7-4bfd-5cedb4044c6d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2105.891387] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2105.891749] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Starting heal instance info cache {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 2105.891749] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Rebuilding the list of instances to heal {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 2105.892295] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 3ef98b4847c64501b6ae9c1e9d814e81 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2105.912525] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ef98b4847c64501b6ae9c1e9d814e81 [ 2105.914733] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2105.914885] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2105.915028] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2105.915161] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2105.915286] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2105.915405] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2105.915523] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Skipping network cache update for instance because it is Building. 
{{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2105.915639] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2105.915755] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2105.915869] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 0f438d9b-394a-465c-97ae-8393bdc3e1cd] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2105.915987] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Didn't find any instances for network info cache update. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 2105.922668] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2105.922668] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 0f438d9b-394a-465c-97ae-8393bdc3e1cd] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2105.922787] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2106.573357] env[62740]: DEBUG nova.compute.manager [req-97909ee4-b12c-48eb-8179-e3a25c9f0c7e req-491ab250-2065-4d87-a06b-fc1c15144b00 service nova] [instance: 0f438d9b-394a-465c-97ae-8393bdc3e1cd] Received event network-changed-0b516ead-5b1e-4e78-a0f9-e4643bfd41e0 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 2106.573569] env[62740]: DEBUG nova.compute.manager [req-97909ee4-b12c-48eb-8179-e3a25c9f0c7e req-491ab250-2065-4d87-a06b-fc1c15144b00 service nova] [instance: 0f438d9b-394a-465c-97ae-8393bdc3e1cd] Refreshing instance network info cache due to event network-changed-0b516ead-5b1e-4e78-a0f9-e4643bfd41e0. 
{{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 2106.573782] env[62740]: DEBUG oslo_concurrency.lockutils [req-97909ee4-b12c-48eb-8179-e3a25c9f0c7e req-491ab250-2065-4d87-a06b-fc1c15144b00 service nova] Acquiring lock "refresh_cache-0f438d9b-394a-465c-97ae-8393bdc3e1cd" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2106.573928] env[62740]: DEBUG oslo_concurrency.lockutils [req-97909ee4-b12c-48eb-8179-e3a25c9f0c7e req-491ab250-2065-4d87-a06b-fc1c15144b00 service nova] Acquired lock "refresh_cache-0f438d9b-394a-465c-97ae-8393bdc3e1cd" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2106.574100] env[62740]: DEBUG nova.network.neutron [req-97909ee4-b12c-48eb-8179-e3a25c9f0c7e req-491ab250-2065-4d87-a06b-fc1c15144b00 service nova] [instance: 0f438d9b-394a-465c-97ae-8393bdc3e1cd] Refreshing network info cache for port 0b516ead-5b1e-4e78-a0f9-e4643bfd41e0 {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2106.574591] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-97909ee4-b12c-48eb-8179-e3a25c9f0c7e req-491ab250-2065-4d87-a06b-fc1c15144b00 service nova] Expecting reply to msg 4f520120082c4e77a9fbb6e7bbc66de5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2106.581722] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4f520120082c4e77a9fbb6e7bbc66de5 [ 2106.816237] env[62740]: DEBUG nova.network.neutron [req-97909ee4-b12c-48eb-8179-e3a25c9f0c7e req-491ab250-2065-4d87-a06b-fc1c15144b00 service nova] [instance: 0f438d9b-394a-465c-97ae-8393bdc3e1cd] Updated VIF entry in instance network info cache for port 0b516ead-5b1e-4e78-a0f9-e4643bfd41e0. 
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2106.816589] env[62740]: DEBUG nova.network.neutron [req-97909ee4-b12c-48eb-8179-e3a25c9f0c7e req-491ab250-2065-4d87-a06b-fc1c15144b00 service nova] [instance: 0f438d9b-394a-465c-97ae-8393bdc3e1cd] Updating instance_info_cache with network_info: [{"id": "0b516ead-5b1e-4e78-a0f9-e4643bfd41e0", "address": "fa:16:3e:fa:36:03", "network": {"id": "a1bf429f-63e1-4b06-ba31-36e8e686268d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1763096855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "065d149aea7645d7a5e32c0d14ff0936", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b516ead-5b", "ovs_interfaceid": "0b516ead-5b1e-4e78-a0f9-e4643bfd41e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2106.817258] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-97909ee4-b12c-48eb-8179-e3a25c9f0c7e req-491ab250-2065-4d87-a06b-fc1c15144b00 service nova] Expecting reply to msg 6629618eec4342349ce805427666968f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2106.825209] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6629618eec4342349ce805427666968f [ 2106.825756] env[62740]: DEBUG oslo_concurrency.lockutils [req-97909ee4-b12c-48eb-8179-e3a25c9f0c7e req-491ab250-2065-4d87-a06b-fc1c15144b00 service nova] Releasing lock "refresh_cache-0f438d9b-394a-465c-97ae-8393bdc3e1cd" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2106.890230] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager.update_available_resource {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2106.890574] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 914e776ffad547fd923a62f9ef7cfefd in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2106.901155] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 914e776ffad547fd923a62f9ef7cfefd [ 2106.902056] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2106.902266] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2106.902432] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2106.902587] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62740) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2106.903655] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-509e4dc9-d11d-4d1a-9b60-8d353b4f4016 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.912188] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c1aec05-ce5d-438c-84b2-91f749d45a86 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.925453] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c30ec0d-2dd5-41b5-9014-8a06b39ff91f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.931546] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b16d3b3-62fc-4e3b-8594-7617eb3052c9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.959307] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181671MB free_disk=90GB free_vcpus=48 pci_devices=None {{(pid=62740) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2106.959395] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2106.959577] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2106.960352] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 2254868c4ff44f33abfe390523b18060 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2106.993903] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2254868c4ff44f33abfe390523b18060 [ 2106.997809] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 
05845e378c2b4669b525f292b77b8140 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2107.007438] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 05845e378c2b4669b525f292b77b8140 [ 2107.028777] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2107.029116] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 9a595940-16ba-401a-922f-331cf87093c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2107.029362] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance d2fb90b7-1618-4f07-8854-81566887a7cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2107.030041] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 19f27c16-45b3-47d8-acf0-18255844431f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2107.030041] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 4ee71d81-7d8b-42f8-a27c-b4645169fa3e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2107.030041] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2107.030041] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 1b975b29-fbaa-4385-9bf9-33496b4ed129 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2107.030246] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance b75490e6-ded3-4aa7-89ff-f4963fe82cfe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2107.030246] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2107.030334] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 0f438d9b-394a-465c-97ae-8393bdc3e1cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2107.030528] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2107.030664] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2107.144925] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8a24a3d-7ba4-402c-a04a-19e17a0224b7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.152512] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a134018d-c663-456d-aa06-b6b85cc2195b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.181909] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca58d462-67b9-4c58-9aef-ce256c6d01a8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.188503] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56cdf459-3159-44cc-b6a1-3adbe4f33ab8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.201316] env[62740]: DEBUG nova.compute.provider_tree [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2107.201774] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 0d3c263b9251465f9104f22035826e57 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2107.208509] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0d3c263b9251465f9104f22035826e57 [ 2107.209393] env[62740]: DEBUG nova.scheduler.client.report [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory 
data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2107.211532] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg d23216e26dfe4324a30110474abbe9ab in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2107.225969] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d23216e26dfe4324a30110474abbe9ab [ 2107.225969] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62740) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2107.225969] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.266s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2109.226867] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2109.890994] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2112.891775] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2113.888030] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2113.888030] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg b823bc3e90714c3f9fbc0813c78c6b15 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2113.904885] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b823bc3e90714c3f9fbc0813c78c6b15 [ 2144.813046] env[62740]: WARNING oslo_vmware.rw_handles [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2144.813046] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2144.813046] env[62740]: ERROR oslo_vmware.rw_handles File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2144.813046] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2144.813046] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2144.813046] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 2144.813046] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2144.813046] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2144.813046] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2144.813046] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2144.813046] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2144.813046] env[62740]: ERROR oslo_vmware.rw_handles [ 2144.813694] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/59813bcc-42a0-411f-8a32-f6799a78f48f/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2144.816610] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2144.817146] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Copying Virtual Disk [datastore2] vmware_temp/59813bcc-42a0-411f-8a32-f6799a78f48f/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore2] vmware_temp/59813bcc-42a0-411f-8a32-f6799a78f48f/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2144.817704] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-34ccb21c-b0b8-48e9-a57a-471a1318e9a1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.826785] env[62740]: DEBUG oslo_vmware.api [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Waiting for the task: (returnval){ [ 2144.826785] env[62740]: value = "task-640356" [ 2144.826785] env[62740]: _type = "Task" [ 2144.826785] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2144.837281] env[62740]: DEBUG oslo_vmware.api [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Task: {'id': task-640356, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2145.337818] env[62740]: DEBUG oslo_vmware.exceptions [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Fault InvalidArgument not matched. {{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2145.338124] env[62740]: DEBUG oslo_concurrency.lockutils [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2145.338700] env[62740]: ERROR nova.compute.manager [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2145.338700] env[62740]: Faults: ['InvalidArgument'] [ 2145.338700] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Traceback (most recent call last): [ 2145.338700] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2145.338700] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] yield resources [ 2145.338700] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2145.338700] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] self.driver.spawn(context, instance, image_meta, [ 2145.338700] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2145.338700] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2145.338700] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2145.338700] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] self._fetch_image_if_missing(context, vi) [ 2145.338700] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2145.338700] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] image_cache(vi, tmp_image_ds_loc) [ 2145.338700] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2145.338700] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] vm_util.copy_virtual_disk( [ 2145.338700] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2145.338700] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] session._wait_for_task(vmdk_copy_task) [ 2145.338700] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2145.338700] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] return self.wait_for_task(task_ref) [ 2145.338700] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2145.338700] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] return evt.wait() [ 2145.338700] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2145.338700] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] result = hub.switch() [ 2145.338700] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2145.338700] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] return self.greenlet.switch() [ 2145.338700] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2145.338700] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] self.f(*self.args, **self.kw) [ 2145.338700] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2145.338700] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] raise exceptions.translate_fault(task_info.error) [ 2145.338700] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2145.338700] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Faults: ['InvalidArgument'] [ 2145.338700] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] [ 2145.339552] env[62740]: INFO nova.compute.manager [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Terminating instance [ 2145.340633] env[62740]: DEBUG oslo_concurrency.lockutils [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2145.340902] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 
tempest-ServersTestJSON-1077887089-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2145.341640] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d5dcf88c-6f80-4524-a9ad-000785398ea0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.343221] env[62740]: DEBUG nova.compute.manager [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2145.343411] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2145.344139] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebd03f11-e33d-4e9e-8dc2-3c11fbeaf9c8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.350837] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2145.351095] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-24ee44f5-fc99-4a6e-bfb6-26c61c44b9af {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.353197] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2145.353372] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2145.354332] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d1e23bc-ded7-4241-b884-e362bbf47090 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.358730] env[62740]: DEBUG oslo_vmware.api [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Waiting for the task: (returnval){ [ 2145.358730] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52796f5c-7707-5f9f-1b49-1402b3ce1c40" [ 2145.358730] env[62740]: _type = "Task" [ 2145.358730] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2145.367765] env[62740]: DEBUG oslo_vmware.api [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52796f5c-7707-5f9f-1b49-1402b3ce1c40, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2145.417145] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2145.417362] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2145.417546] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Deleting the datastore file [datastore2] 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67 {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2145.417816] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4904d844-6107-4e5d-805f-51b972797387 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.423925] env[62740]: DEBUG oslo_vmware.api [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Waiting for the task: (returnval){ [ 2145.423925] env[62740]: value = "task-640358" [ 2145.423925] env[62740]: _type = "Task" [ 2145.423925] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2145.431190] env[62740]: DEBUG oslo_vmware.api [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Task: {'id': task-640358, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2145.868990] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2145.869285] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Creating directory with path [datastore2] vmware_temp/dba2483b-bf55-4e26-adfd-4ab7147dcffe/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2145.869495] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ed4b6df6-9995-4ced-bebf-7c17cc3baf4a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.880075] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Created directory with path [datastore2] vmware_temp/dba2483b-bf55-4e26-adfd-4ab7147dcffe/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2145.880272] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Fetch image to [datastore2] vmware_temp/dba2483b-bf55-4e26-adfd-4ab7147dcffe/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2145.880487] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/dba2483b-bf55-4e26-adfd-4ab7147dcffe/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2145.881168] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8702d6a-4b55-4a6b-9982-8cc700e628d0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.887160] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f8cd574-8d1c-4fcb-94b8-1d29ae77f37f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.895634] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b928fe88-27f7-41ae-ba28-9222f8a585d3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.927803] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72a0a000-0ee6-4524-a0c3-ec7536e530f7 {{(pid=62740) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.934346] env[62740]: DEBUG oslo_vmware.api [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Task: {'id': task-640358, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076687} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2145.935662] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2145.935854] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2145.936043] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2145.936226] env[62740]: INFO nova.compute.manager [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Took 0.59 seconds to destroy the instance on the hypervisor. 
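
The CopyVirtualDisk_Task and DeleteDatastoreFile_Task entries above (and the tracebacks through oslo_vmware/api.py wait_for_task and _poll_task) all follow the same poll-until-terminal pattern: the client repeatedly reads the vCenter task's info until it reaches success or error, and a terminal error is translated into a fault exception such as "A specified parameter was not correct: fileType". A minimal sketch of that loop under simplified assumptions — TaskInfo, get_task_info, and TaskFailed are illustrative stand-ins, not the real oslo.vmware API:

    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        state: str            # 'running' | 'success' | 'error'
        result: object = None
        error: str = ''

    class TaskFailed(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""

    def wait_for_task(get_task_info, interval=0.5):
        # Poll the task until vCenter reports a terminal state, the same shape
        # as the "progress is 0%" ... "completed successfully" entries above.
        while True:
            info = get_task_info()
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                # Terminal failure: the fault text surfaces to the caller,
                # e.g. "A specified parameter was not correct: fileType".
                raise TaskFailed(info.error)
            time.sleep(interval)

    # Toy usage: a task that succeeds on the second poll.
    polls = iter([TaskInfo('running'), TaskInfo('success', result='done')])
    print(wait_for_task(lambda: next(polls), interval=0))  # -> done
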
[ 2145.938014] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1ce40949-66b1-43ec-b39d-1587435e34ad {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.939838] env[62740]: DEBUG nova.compute.claims [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2145.940020] env[62740]: DEBUG oslo_concurrency.lockutils [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2145.940288] env[62740]: DEBUG oslo_concurrency.lockutils [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2145.942163] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg a8dc18a5b38c4734b8ce3edd31a64537 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2145.961720] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2145.982012] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a8dc18a5b38c4734b8ce3edd31a64537 [ 2146.019045] env[62740]: DEBUG oslo_vmware.rw_handles [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/dba2483b-bf55-4e26-adfd-4ab7147dcffe/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2146.081825] env[62740]: DEBUG oslo_vmware.rw_handles [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Completed reading data from the image iterator. 
{{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2146.082039] env[62740]: DEBUG oslo_vmware.rw_handles [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/dba2483b-bf55-4e26-adfd-4ab7147dcffe/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2146.157394] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b018ebc4-bfc9-487f-8a47-d2732a7a92f5 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.164798] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b3f8a7d-6bac-4f3e-a107-32cdeafbf603 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.193984] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51b2ad7e-79df-4c63-b58c-b6fc0a9cb242 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.200049] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f7bbeb9-5b36-4b18-8281-3caa24819a4c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.212091] env[62740]: DEBUG nova.compute.provider_tree [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2146.212583] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg 2ad6763456c748f4bba48069aeaabfbe in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2146.221058] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ad6763456c748f4bba48069aeaabfbe [ 2146.221978] env[62740]: DEBUG nova.scheduler.client.report [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2146.224229] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg 
5327a1aaa86f4cf0be56dc3f5d5408aa in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2146.237678] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5327a1aaa86f4cf0be56dc3f5d5408aa [ 2146.238383] env[62740]: DEBUG oslo_concurrency.lockutils [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.298s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2146.238941] env[62740]: ERROR nova.compute.manager [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2146.238941] env[62740]: Faults: ['InvalidArgument'] [ 2146.238941] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Traceback (most recent call last): [ 2146.238941] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2146.238941] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] self.driver.spawn(context, instance, image_meta, [ 2146.238941] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2146.238941] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2146.238941] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2146.238941] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] self._fetch_image_if_missing(context, vi) [ 2146.238941] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2146.238941] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] image_cache(vi, tmp_image_ds_loc) [ 2146.238941] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2146.238941] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] vm_util.copy_virtual_disk( [ 2146.238941] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2146.238941] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] session._wait_for_task(vmdk_copy_task) [ 2146.238941] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2146.238941] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] return self.wait_for_task(task_ref) [ 2146.238941] env[62740]: 
ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2146.238941] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] return evt.wait() [ 2146.238941] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2146.238941] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] result = hub.switch() [ 2146.238941] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2146.238941] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] return self.greenlet.switch() [ 2146.238941] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2146.238941] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] self.f(*self.args, **self.kw) [ 2146.238941] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2146.238941] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] raise exceptions.translate_fault(task_info.error) [ 2146.238941] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2146.238941] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Faults: ['InvalidArgument'] [ 2146.238941] env[62740]: ERROR nova.compute.manager [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] [ 2146.239789] env[62740]: DEBUG nova.compute.utils [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2146.241171] env[62740]: DEBUG nova.compute.manager [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Build of instance 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67 was re-scheduled: A specified parameter was not correct: fileType [ 2146.241171] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2146.241550] env[62740]: DEBUG nova.compute.manager [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2146.241723] env[62740]: DEBUG nova.compute.manager [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Virt driver does not provide 
unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2146.241906] env[62740]: DEBUG nova.compute.manager [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2146.242089] env[62740]: DEBUG nova.network.neutron [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2146.562746] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg 0b6a07db23d348f9b248544f2a01078a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2146.576479] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0b6a07db23d348f9b248544f2a01078a [ 2146.577115] env[62740]: DEBUG nova.network.neutron [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2146.577567] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg 0694813e4b8c40389b1ed61e5a5fb389 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2146.586613] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0694813e4b8c40389b1ed61e5a5fb389 [ 2146.587320] env[62740]: INFO nova.compute.manager [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Took 0.35 seconds to deallocate network for instance.
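
The inventory dict the scheduler report client keeps logging for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 maps directly to placement capacity: for each resource class the usable capacity is (total - reserved) * allocation_ratio, while min_unit/max_unit/step_size constrain each individual allocation (e.g. max_unit 16 caps a single instance at 16 VCPUs). A small worked check against the values in the log — plain arithmetic, no Nova imports:

    # Inventory as reported above for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def capacity(inv):
        # Placement's effective capacity per resource class.
        return (inv['total'] - inv['reserved']) * inv['allocation_ratio']

    for rc, inv in inventory.items():
        print(rc, capacity(inv))
    # VCPU 192.0      (48 physical cores oversubscribed 4x)
    # MEMORY_MB 196078.0
    # DISK_GB 400.0
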
[ 2146.589418] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg 085d2271590244a28a66192ab1db1657 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2146.626741] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 085d2271590244a28a66192ab1db1657 [ 2146.629415] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg d03cf5c28efb47b58feb6e6577c09371 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2146.661027] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d03cf5c28efb47b58feb6e6577c09371 [ 2146.687316] env[62740]: INFO nova.scheduler.client.report [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Deleted allocations for instance 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67 [ 2146.693406] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg b93ffd0ef7194b93ad42ea675c587bba in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2146.707728] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b93ffd0ef7194b93ad42ea675c587bba [ 2146.708269] env[62740]: DEBUG oslo_concurrency.lockutils [None req-580f27a0-e86d-4839-bd81-93e040f9160b tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Lock "1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 559.212s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2146.708507] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c9c8d76d-2ce5-4ba2-8619-6a862799e6ec tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Lock "1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 362.395s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2146.708723] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c9c8d76d-2ce5-4ba2-8619-6a862799e6ec tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Acquiring lock "1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2146.709213] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c9c8d76d-2ce5-4ba2-8619-6a862799e6ec tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Lock "1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2146.709213] env[62740]: DEBUG 
oslo_concurrency.lockutils [None req-c9c8d76d-2ce5-4ba2-8619-6a862799e6ec tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Lock "1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2146.711103] env[62740]: INFO nova.compute.manager [None req-c9c8d76d-2ce5-4ba2-8619-6a862799e6ec tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Terminating instance [ 2146.712878] env[62740]: DEBUG nova.compute.manager [None req-c9c8d76d-2ce5-4ba2-8619-6a862799e6ec tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2146.712948] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-c9c8d76d-2ce5-4ba2-8619-6a862799e6ec tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2146.713636] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-13cb7a06-80ad-4290-a22b-f4bffdb4434a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.724291] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da789870-29b8-4cdd-945a-9f88dbaa8fd3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.751649] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-c9c8d76d-2ce5-4ba2-8619-6a862799e6ec tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67 could not be found. [ 2146.751851] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-c9c8d76d-2ce5-4ba2-8619-6a862799e6ec tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2146.752040] env[62740]: INFO nova.compute.manager [None req-c9c8d76d-2ce5-4ba2-8619-6a862799e6ec tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2146.752294] env[62740]: DEBUG oslo.service.loopingcall [None req-c9c8d76d-2ce5-4ba2-8619-6a862799e6ec tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2146.752527] env[62740]: DEBUG nova.compute.manager [-] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2146.752615] env[62740]: DEBUG nova.network.neutron [-] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2146.769576] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 24ba5d8cae89461a90800d09cfee250b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2146.776193] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 24ba5d8cae89461a90800d09cfee250b [ 2146.776580] env[62740]: DEBUG nova.network.neutron [-] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2146.777100] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg b3b9d5d0bf2243d281c2f3f9a98fcbb0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2146.784104] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b3b9d5d0bf2243d281c2f3f9a98fcbb0 [ 2146.784550] env[62740]: INFO nova.compute.manager [-] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] Took 0.03 seconds to deallocate network for instance. [ 2146.788100] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c9c8d76d-2ce5-4ba2-8619-6a862799e6ec tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg 9bef745fc9a644f9900c029f2ff5576b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2146.819980] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9bef745fc9a644f9900c029f2ff5576b [ 2146.833394] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c9c8d76d-2ce5-4ba2-8619-6a862799e6ec tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg 605bd0ca6b934d71a56dbf854bc1f988 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2146.875803] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 605bd0ca6b934d71a56dbf854bc1f988 [ 2146.878529] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c9c8d76d-2ce5-4ba2-8619-6a862799e6ec tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Lock "1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.170s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2146.878914] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c9c8d76d-2ce5-4ba2-8619-6a862799e6ec tempest-SecurityGroupsTestJSON-875453750 tempest-SecurityGroupsTestJSON-875453750-project-member] Expecting reply to msg fe446dbba6a64a13bf386c57db76fb6e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2146.879582] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 337.631s 
{{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2146.879806] env[62740]: INFO nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67] During sync_power_state the instance has a pending task (deleting). Skip. [ 2146.880033] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "1384d6b9-f253-4e1c-aa3b-fd8ffd73bb67" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2146.887721] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fe446dbba6a64a13bf386c57db76fb6e [ 2164.890555] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2164.890555] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2165.528020] env[62740]: DEBUG oslo_concurrency.lockutils [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Acquiring lock "2ad0d938-304b-4535-8362-099c3a6864f6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2165.528020] env[62740]: DEBUG oslo_concurrency.lockutils [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Lock "2ad0d938-304b-4535-8362-099c3a6864f6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2165.528020] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Expecting reply to msg 8174ac5bf0c641b28aef0434f0d383f9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2165.537281] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8174ac5bf0c641b28aef0434f0d383f9 [ 2165.537773] env[62740]: DEBUG nova.compute.manager [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] [instance: 2ad0d938-304b-4535-8362-099c3a6864f6] Starting instance... 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2165.539448] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Expecting reply to msg 0369f4bca36c47148b9dd0cfb0a92852 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2165.580323] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0369f4bca36c47148b9dd0cfb0a92852 [ 2165.598244] env[62740]: DEBUG oslo_concurrency.lockutils [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2165.598485] env[62740]: DEBUG oslo_concurrency.lockutils [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2165.600036] env[62740]: INFO nova.compute.claims [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] [instance: 2ad0d938-304b-4535-8362-099c3a6864f6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2165.601659] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Expecting reply to msg 3c913d46a0464d5cad029733517979d8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2165.683541] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c913d46a0464d5cad029733517979d8 [ 2165.685459] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Expecting reply to msg cb95a5df077341829b16502abaea1ee8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2165.693278] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cb95a5df077341829b16502abaea1ee8 [ 2165.814874] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d8a4cea-602b-4911-8f0c-b77478d2e05a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.822838] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d832e07-e814-432a-9c00-6129a87399fe {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.854693] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12f321fb-c24c-4675-9de9-eb87c10acab5 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.861496] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-5745b2e4-2641-41ba-bdd6-3fd03deea49f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.874230] env[62740]: DEBUG nova.compute.provider_tree [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2165.874735] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Expecting reply to msg af02ca162a3e4c57b5a4f10a85a38f73 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2165.883109] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af02ca162a3e4c57b5a4f10a85a38f73 [ 2165.883972] env[62740]: DEBUG nova.scheduler.client.report [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2165.886393] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Expecting reply to msg 42786ba43d4b4945af9bca244976bef2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2165.890043] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2165.890488] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2165.890638] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62740) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 2165.910191] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 42786ba43d4b4945af9bca244976bef2 [ 2165.911062] env[62740]: DEBUG oslo_concurrency.lockutils [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.312s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2165.911541] env[62740]: DEBUG nova.compute.manager [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] [instance: 2ad0d938-304b-4535-8362-099c3a6864f6] Start building networks asynchronously for instance. {{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2165.913187] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Expecting reply to msg 46c7ec7a10e94680a889d52fc01ccafd in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2165.960655] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 46c7ec7a10e94680a889d52fc01ccafd [ 2165.962069] env[62740]: DEBUG nova.compute.utils [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2165.962672] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Expecting reply to msg 09c4eed27ce54bada1dc3bb52e77b014 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2165.963943] env[62740]: DEBUG nova.compute.manager [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] [instance: 2ad0d938-304b-4535-8362-099c3a6864f6] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2165.964440] env[62740]: DEBUG nova.network.neutron [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] [instance: 2ad0d938-304b-4535-8362-099c3a6864f6] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2165.973106] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 09c4eed27ce54bada1dc3bb52e77b014 [ 2165.974663] env[62740]: DEBUG nova.compute.manager [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] [instance: 2ad0d938-304b-4535-8362-099c3a6864f6] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2165.975642] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Expecting reply to msg 2238e92cee4c411ba7758e56c8ce21de in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2166.006808] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2238e92cee4c411ba7758e56c8ce21de [ 2166.009545] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Expecting reply to msg c22766894a154f5cad94ce6af132ddc8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2166.025921] env[62740]: DEBUG nova.policy [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1128d7de322b44628dcb5bbf4745a392', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '09ed2c0bd38048c19f79be18a79c79a7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 2166.037903] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c22766894a154f5cad94ce6af132ddc8 [ 2166.039102] env[62740]: DEBUG nova.compute.manager [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] [instance: 2ad0d938-304b-4535-8362-099c3a6864f6] Start spawning the instance on the hypervisor. 
{{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2166.073821] env[62740]: DEBUG nova.virt.hardware [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2166.074158] env[62740]: DEBUG nova.virt.hardware [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2166.074315] env[62740]: DEBUG nova.virt.hardware [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2166.074499] env[62740]: DEBUG nova.virt.hardware [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2166.074644] env[62740]: DEBUG nova.virt.hardware [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2166.074793] env[62740]: DEBUG nova.virt.hardware [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2166.075008] env[62740]: DEBUG nova.virt.hardware [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2166.075179] env[62740]: DEBUG nova.virt.hardware [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2166.075347] env[62740]: DEBUG 
nova.virt.hardware [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2166.075507] env[62740]: DEBUG nova.virt.hardware [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2166.075679] env[62740]: DEBUG nova.virt.hardware [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2166.076825] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a0f8594-bff8-47c1-862f-7a7e368278b6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.084890] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8a50018-4b32-4607-af8f-57c5b437c4d9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.395972] env[62740]: DEBUG nova.network.neutron [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] [instance: 2ad0d938-304b-4535-8362-099c3a6864f6] Successfully created port: 8d9fa5ad-b708-499a-912c-8a33088f691a {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2166.891534] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2166.891800] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Starting heal instance info cache {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 2166.891838] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Rebuilding the list of instances to heal {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 2166.892428] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 886f308162f640d899c762a59df1c7a1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2166.913371] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 886f308162f640d899c762a59df1c7a1 [ 2166.915717] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Skipping network cache update for instance because it is Building. 
{{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2166.915875] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2166.916059] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2166.916210] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2166.916371] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2166.916516] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2166.916667] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2166.916828] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2166.916972] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 0f438d9b-394a-465c-97ae-8393bdc3e1cd] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2166.917143] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 2ad0d938-304b-4535-8362-099c3a6864f6] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2166.917296] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Didn't find any instances for network info cache update. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}}
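The recurring "Running periodic task ComputeManager._..." records, including the _heal_instance_info_cache pass above, come from oslo.service's periodic task machinery, which Nova's ComputeManager builds on. A minimal sketch of how such tasks are declared and driven; the class and task names here are hypothetical:

    from oslo_config import cfg
    from oslo_service import periodic_task

    class DemoManager(periodic_task.PeriodicTasks):
        # Hypothetical stand-in; ComputeManager registers tasks such as
        # _heal_instance_info_cache the same way.
        def __init__(self):
            super().__init__(cfg.CONF)

        @periodic_task.periodic_task(spacing=60)
        def _demo_task(self, context):
            # Runs roughly every 60 seconds once the task loop is started;
            # each run is preceded by a "Running periodic task ..." line.
            pass

    manager = DemoManager()
    # The service framework invokes this in a loop on a timer.
    manager.run_periodic_tasks(context=None)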
[ 2166.930501] env[62740]: DEBUG nova.compute.manager [req-aae13a87-40fc-4d92-981c-1c639377c525 req-4d05b597-e74e-4db5-80f2-65c86b0f2cba service nova] [instance: 2ad0d938-304b-4535-8362-099c3a6864f6] Received event network-vif-plugged-8d9fa5ad-b708-499a-912c-8a33088f691a {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 2166.930718] env[62740]: DEBUG oslo_concurrency.lockutils [req-aae13a87-40fc-4d92-981c-1c639377c525 req-4d05b597-e74e-4db5-80f2-65c86b0f2cba service nova] Acquiring lock "2ad0d938-304b-4535-8362-099c3a6864f6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2166.930925] env[62740]: DEBUG oslo_concurrency.lockutils [req-aae13a87-40fc-4d92-981c-1c639377c525 req-4d05b597-e74e-4db5-80f2-65c86b0f2cba service nova] Lock "2ad0d938-304b-4535-8362-099c3a6864f6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2166.931108] env[62740]: DEBUG oslo_concurrency.lockutils [req-aae13a87-40fc-4d92-981c-1c639377c525 req-4d05b597-e74e-4db5-80f2-65c86b0f2cba service nova] Lock "2ad0d938-304b-4535-8362-099c3a6864f6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2166.931307] env[62740]: DEBUG nova.compute.manager [req-aae13a87-40fc-4d92-981c-1c639377c525 req-4d05b597-e74e-4db5-80f2-65c86b0f2cba service nova] [instance: 2ad0d938-304b-4535-8362-099c3a6864f6] No waiting events found dispatching network-vif-plugged-8d9fa5ad-b708-499a-912c-8a33088f691a {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2166.931478] env[62740]: WARNING nova.compute.manager [req-aae13a87-40fc-4d92-981c-1c639377c525 req-4d05b597-e74e-4db5-80f2-65c86b0f2cba service nova] [instance: 2ad0d938-304b-4535-8362-099c3a6864f6] Received unexpected event network-vif-plugged-8d9fa5ad-b708-499a-912c-8a33088f691a for instance with vm_state building and task_state spawning.
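The Acquiring/acquired/released triplets above, with their "waited"/"held" timings, are emitted by oslo.concurrency's lockutils wrapper around the guarded function. A minimal sketch of the two forms that appear in this log; the lock names are illustrative:

    from oslo_concurrency import lockutils

    # Decorator form, as used for the per-instance "<uuid>-events" lock;
    # the wrapper logs acquire ("waited N s") and release ("held N s").
    @lockutils.synchronized('instance-events-demo')
    def pop_event():
        pass

    # Context-manager form, as used for the "refresh_cache-<uuid>" locks.
    with lockutils.lock('refresh-cache-demo'):
        pass

    pop_event()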
[ 2167.012917] env[62740]: DEBUG nova.network.neutron [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] [instance: 2ad0d938-304b-4535-8362-099c3a6864f6] Successfully updated port: 8d9fa5ad-b708-499a-912c-8a33088f691a {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2167.013439] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Expecting reply to msg 512e080ce14c4c83a85b75ef505a6885 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2167.024231] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 512e080ce14c4c83a85b75ef505a6885 [ 2167.024851] env[62740]: DEBUG oslo_concurrency.lockutils [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Acquiring lock "refresh_cache-2ad0d938-304b-4535-8362-099c3a6864f6" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2167.024980] env[62740]: DEBUG oslo_concurrency.lockutils [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Acquired lock "refresh_cache-2ad0d938-304b-4535-8362-099c3a6864f6" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2167.025141] env[62740]: DEBUG nova.network.neutron [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] [instance: 2ad0d938-304b-4535-8362-099c3a6864f6] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2167.025514] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Expecting reply to msg 7e087a4be81b470ca86ed632f2d7534a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2167.032839] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7e087a4be81b470ca86ed632f2d7534a [ 2167.065826] env[62740]: DEBUG nova.network.neutron [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] [instance: 2ad0d938-304b-4535-8362-099c3a6864f6] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2167.303094] env[62740]: DEBUG nova.network.neutron [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] [instance: 2ad0d938-304b-4535-8362-099c3a6864f6] Updating instance_info_cache with network_info: [{"id": "8d9fa5ad-b708-499a-912c-8a33088f691a", "address": "fa:16:3e:95:3e:fa", "network": {"id": "1f60a0cc-00de-466d-825a-9ffe8ed9ee33", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-786081648-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "09ed2c0bd38048c19f79be18a79c79a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c405e9f-a6c8-4308-acac-071654efe18e", "external-id": "nsx-vlan-transportzone-851", "segmentation_id": 851, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d9fa5ad-b7", "ovs_interfaceid": "8d9fa5ad-b708-499a-912c-8a33088f691a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2167.303614] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Expecting reply to msg 41c0f081bf4e44cd90f3acbe2ba83619 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2167.315737] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 41c0f081bf4e44cd90f3acbe2ba83619 [ 2167.316285] env[62740]: DEBUG oslo_concurrency.lockutils [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Releasing lock "refresh_cache-2ad0d938-304b-4535-8362-099c3a6864f6" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2167.316561] env[62740]: DEBUG nova.compute.manager [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] [instance: 2ad0d938-304b-4535-8362-099c3a6864f6] Instance network_info: |[{"id": "8d9fa5ad-b708-499a-912c-8a33088f691a", "address": "fa:16:3e:95:3e:fa", "network": {"id": "1f60a0cc-00de-466d-825a-9ffe8ed9ee33", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-786081648-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "09ed2c0bd38048c19f79be18a79c79a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "3c405e9f-a6c8-4308-acac-071654efe18e", "external-id": "nsx-vlan-transportzone-851", "segmentation_id": 851, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d9fa5ad-b7", "ovs_interfaceid": "8d9fa5ad-b708-499a-912c-8a33088f691a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2167.316945] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] [instance: 2ad0d938-304b-4535-8362-099c3a6864f6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:95:3e:fa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3c405e9f-a6c8-4308-acac-071654efe18e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8d9fa5ad-b708-499a-912c-8a33088f691a', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2167.324701] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Creating folder: Project (09ed2c0bd38048c19f79be18a79c79a7). Parent ref: group-v156037. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2167.325203] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dfd40069-7625-4332-a05f-aeba765eb1d1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.336569] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Created folder: Project (09ed2c0bd38048c19f79be18a79c79a7) in parent group-v156037. [ 2167.336791] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Creating folder: Instances. Parent ref: group-v156187. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2167.337058] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fb7983ca-8d42-4704-a010-d84ee80c5178 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.345858] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Created folder: Instances in parent group-v156187. [ 2167.345939] env[62740]: DEBUG oslo.service.loopingcall [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2167.346078] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2ad0d938-304b-4535-8362-099c3a6864f6] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2167.346277] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-caa3aa42-f102-421c-995f-7849d521114f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.364471] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2167.364471] env[62740]: value = "task-640361" [ 2167.364471] env[62740]: _type = "Task" [ 2167.364471] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2167.371613] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640361, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.873909] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640361, 'name': CreateVM_Task, 'duration_secs': 0.309965} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2167.874097] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2ad0d938-304b-4535-8362-099c3a6864f6] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2167.874772] env[62740]: DEBUG oslo_concurrency.lockutils [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2167.874937] env[62740]: DEBUG oslo_concurrency.lockutils [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2167.875306] env[62740]: DEBUG oslo_concurrency.lockutils [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2167.875590] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-087c7613-2999-4111-b39c-f66ee42b8430 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.879865] env[62740]: DEBUG oslo_vmware.api [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Waiting for the task: (returnval){ [ 2167.879865] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]523d765c-126b-55a1-3f6a-92c987ab0acb" [ 2167.879865] env[62740]: _type = "Task" [ 2167.879865] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
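The "Invoking Folder.CreateVM_Task ...", "Waiting for the task: (returnval){ value = "task-640361" ... }", and "progress is 0%" records above are oslo.vmware's invoke_api/wait_for_task cycle. A minimal sketch of that pattern; the host, credentials, and managed-object references below are placeholders, not values from this deployment:

    from oslo_vmware import api

    # Hypothetical vCenter connection details.
    session = api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=3, task_poll_interval=0.5)

    folder_ref = None    # target Folder managed-object ref (placeholder)
    config_spec = None   # vim.vm.ConfigSpec built elsewhere (placeholder)
    pool_ref = None      # ResourcePool managed-object ref (placeholder)

    # invoke_api() issues the SOAP request and returns a task reference
    # such as the "task-640361" value logged above.
    task = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                              config=config_spec, pool=pool_ref)

    # wait_for_task() polls the task every task_poll_interval seconds
    # ("progress is 0%" ... "completed successfully") and raises a
    # translated exception if the task ends in an error state.
    task_info = session.wait_for_task(task)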
[ 2167.886895] env[62740]: DEBUG oslo_vmware.api [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]523d765c-126b-55a1-3f6a-92c987ab0acb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.890371] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager.update_available_resource {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2167.890720] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 3032f24cdd7e47878233931ae4b53814 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2167.900392] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3032f24cdd7e47878233931ae4b53814 [ 2167.901380] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2167.901584] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2167.901754] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2167.901908] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62740) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2167.903265] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c5a8654-e29a-405d-8daa-f3fc91e09caa {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.910747] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d2fcb84-2dde-4d3f-ab52-0e75c78e1091 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.924140] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-944dddb4-12d3-494d-8f7e-5e50e50aaf62 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.930155] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-280ed8fe-cd06-4d7f-b357-2ce5a60dc6b4
{{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.959140] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181675MB free_disk=90GB free_vcpus=48 pci_devices=None {{(pid=62740) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2167.959276] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2167.959462] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2167.960274] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 7118018611a14644af076acbaf9005f8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2167.993484] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7118018611a14644af076acbaf9005f8 [ 2167.997467] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 26c4dd4750fc4d2b83c69a2a87b09947 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2168.006316] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 26c4dd4750fc4d2b83c69a2a87b09947 [ 2168.026067] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 9a595940-16ba-401a-922f-331cf87093c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2168.026206] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance d2fb90b7-1618-4f07-8854-81566887a7cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2168.026332] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 19f27c16-45b3-47d8-acf0-18255844431f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2168.026452] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 4ee71d81-7d8b-42f8-a27c-b4645169fa3e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2168.026570] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2168.026686] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 1b975b29-fbaa-4385-9bf9-33496b4ed129 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2168.026819] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance b75490e6-ded3-4aa7-89ff-f4963fe82cfe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2168.026936] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2168.027061] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 0f438d9b-394a-465c-97ae-8393bdc3e1cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2168.027175] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 2ad0d938-304b-4535-8362-099c3a6864f6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2168.027354] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2168.027489] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2168.135820] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b4d0192-7e7e-4b42-aff5-1ec961be7af9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.143508] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edf79bc0-9a2d-4065-947f-94dd4ef1f4c2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.171747] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d22fdca-ba5a-42f5-85bb-58f81d9422f4 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.178577] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dca2956e-70bf-4410-ac1e-778b8f41a5e4 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.192725] env[62740]: DEBUG nova.compute.provider_tree [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2168.193186] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 4cc8daa9314a4a4db68451a88bda1d19 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2168.202303] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4cc8daa9314a4a4db68451a88bda1d19 [ 2168.203145] env[62740]: DEBUG nova.scheduler.client.report [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2168.205303] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg cbaf4055005b467cb38890ef86758d45 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2168.216978] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cbaf4055005b467cb38890ef86758d45 [ 
2168.217597] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62740) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2168.217773] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.258s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2168.390607] env[62740]: DEBUG oslo_concurrency.lockutils [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2168.390607] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] [instance: 2ad0d938-304b-4535-8362-099c3a6864f6] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2168.390607] env[62740]: DEBUG oslo_concurrency.lockutils [None req-0bdc442d-ad0b-4f05-a2f1-5f9982bce6b7 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2168.961794] env[62740]: DEBUG nova.compute.manager [req-705b294d-f3ec-42cf-8618-765e3ddac579 req-74fef0e2-c21b-41fb-9477-0fbfe1a33725 service nova] [instance: 2ad0d938-304b-4535-8362-099c3a6864f6] Received event network-changed-8d9fa5ad-b708-499a-912c-8a33088f691a {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 2168.962322] env[62740]: DEBUG nova.compute.manager [req-705b294d-f3ec-42cf-8618-765e3ddac579 req-74fef0e2-c21b-41fb-9477-0fbfe1a33725 service nova] [instance: 2ad0d938-304b-4535-8362-099c3a6864f6] Refreshing instance network info cache due to event network-changed-8d9fa5ad-b708-499a-912c-8a33088f691a. 
{{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 2168.962551] env[62740]: DEBUG oslo_concurrency.lockutils [req-705b294d-f3ec-42cf-8618-765e3ddac579 req-74fef0e2-c21b-41fb-9477-0fbfe1a33725 service nova] Acquiring lock "refresh_cache-2ad0d938-304b-4535-8362-099c3a6864f6" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2168.962697] env[62740]: DEBUG oslo_concurrency.lockutils [req-705b294d-f3ec-42cf-8618-765e3ddac579 req-74fef0e2-c21b-41fb-9477-0fbfe1a33725 service nova] Acquired lock "refresh_cache-2ad0d938-304b-4535-8362-099c3a6864f6" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2168.962862] env[62740]: DEBUG nova.network.neutron [req-705b294d-f3ec-42cf-8618-765e3ddac579 req-74fef0e2-c21b-41fb-9477-0fbfe1a33725 service nova] [instance: 2ad0d938-304b-4535-8362-099c3a6864f6] Refreshing network info cache for port 8d9fa5ad-b708-499a-912c-8a33088f691a {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2168.963375] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-705b294d-f3ec-42cf-8618-765e3ddac579 req-74fef0e2-c21b-41fb-9477-0fbfe1a33725 service nova] Expecting reply to msg 7058c4a4e18b40dcb2d6857d84de8916 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2168.970896] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7058c4a4e18b40dcb2d6857d84de8916 [ 2169.208140] env[62740]: DEBUG nova.network.neutron [req-705b294d-f3ec-42cf-8618-765e3ddac579 req-74fef0e2-c21b-41fb-9477-0fbfe1a33725 service nova] [instance: 2ad0d938-304b-4535-8362-099c3a6864f6] Updated VIF entry in instance network info cache for port 8d9fa5ad-b708-499a-912c-8a33088f691a. 
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2169.208512] env[62740]: DEBUG nova.network.neutron [req-705b294d-f3ec-42cf-8618-765e3ddac579 req-74fef0e2-c21b-41fb-9477-0fbfe1a33725 service nova] [instance: 2ad0d938-304b-4535-8362-099c3a6864f6] Updating instance_info_cache with network_info: [{"id": "8d9fa5ad-b708-499a-912c-8a33088f691a", "address": "fa:16:3e:95:3e:fa", "network": {"id": "1f60a0cc-00de-466d-825a-9ffe8ed9ee33", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-786081648-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "09ed2c0bd38048c19f79be18a79c79a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c405e9f-a6c8-4308-acac-071654efe18e", "external-id": "nsx-vlan-transportzone-851", "segmentation_id": 851, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d9fa5ad-b7", "ovs_interfaceid": "8d9fa5ad-b708-499a-912c-8a33088f691a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2169.209116] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-705b294d-f3ec-42cf-8618-765e3ddac579 req-74fef0e2-c21b-41fb-9477-0fbfe1a33725 service nova] Expecting reply to msg c5dbf79b4e014ed58a1bdf0e562300de in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2169.218029] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2169.218542] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c5dbf79b4e014ed58a1bdf0e562300de [ 2169.219279] env[62740]: DEBUG oslo_concurrency.lockutils [req-705b294d-f3ec-42cf-8618-765e3ddac579 req-74fef0e2-c21b-41fb-9477-0fbfe1a33725 service nova] Releasing lock "refresh_cache-2ad0d938-304b-4535-8362-099c3a6864f6" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2170.891288] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2172.892818] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2194.167489] env[62740]: WARNING oslo_vmware.rw_handles [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without 
response [ 2194.167489] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2194.167489] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2194.167489] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2194.167489] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2194.167489] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 2194.167489] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2194.167489] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2194.167489] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2194.167489] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2194.167489] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2194.167489] env[62740]: ERROR oslo_vmware.rw_handles [ 2194.168215] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/dba2483b-bf55-4e26-adfd-4ab7147dcffe/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2194.170396] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2194.170670] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Copying Virtual Disk [datastore2] vmware_temp/dba2483b-bf55-4e26-adfd-4ab7147dcffe/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore2] vmware_temp/dba2483b-bf55-4e26-adfd-4ab7147dcffe/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2194.170958] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d7792c02-de8d-44f6-8250-8c7b964a3dce {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.178976] env[62740]: DEBUG oslo_vmware.api [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Waiting for the task: (returnval){ [ 2194.178976] env[62740]: value = "task-640362" [ 2194.178976] env[62740]: _type = "Task" [ 2194.178976] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2194.186674] env[62740]: DEBUG oslo_vmware.api [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Task: {'id': task-640362, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2194.688517] env[62740]: DEBUG oslo_vmware.exceptions [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Fault InvalidArgument not matched. {{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2194.688857] env[62740]: DEBUG oslo_concurrency.lockutils [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2194.689433] env[62740]: ERROR nova.compute.manager [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2194.689433] env[62740]: Faults: ['InvalidArgument'] [ 2194.689433] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] Traceback (most recent call last): [ 2194.689433] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2194.689433] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] yield resources [ 2194.689433] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2194.689433] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] self.driver.spawn(context, instance, image_meta, [ 2194.689433] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2194.689433] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2194.689433] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2194.689433] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] self._fetch_image_if_missing(context, vi) [ 2194.689433] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2194.689433] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] image_cache(vi, tmp_image_ds_loc) [ 2194.689433] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2194.689433] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] vm_util.copy_virtual_disk( [ 2194.689433] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2194.689433] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] session._wait_for_task(vmdk_copy_task) [ 2194.689433] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2194.689433] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] return self.wait_for_task(task_ref) [ 2194.689433] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2194.689433] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] return evt.wait() [ 2194.689433] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2194.689433] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] result = hub.switch() [ 2194.689433] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2194.689433] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] return self.greenlet.switch() [ 2194.689433] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2194.689433] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] self.f(*self.args, **self.kw) [ 2194.689433] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2194.689433] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] raise exceptions.translate_fault(task_info.error) [ 2194.689433] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2194.689433] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] Faults: ['InvalidArgument'] [ 2194.689433] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] [ 2194.690390] env[62740]: INFO nova.compute.manager [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Terminating instance [ 2194.691391] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2194.691560] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2194.691784] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9158a48f-b703-46fd-a38f-11542a9079f6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.694165] env[62740]: DEBUG nova.compute.manager [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2194.694357] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2194.695179] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1437912a-f4c6-4436-b0aa-ca29ff3470aa {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.701863] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2194.702085] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eeb4a2c2-cdcd-413b-b6ed-c69559e68f02 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.704278] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2194.704545] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2194.705867] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df5897e3-0fd0-496a-8e66-fe3eb6a7685b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.710706] env[62740]: DEBUG oslo_vmware.api [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Waiting for the task: (returnval){ [ 2194.710706] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]529e1b10-8461-632d-c7b3-48330a5576bb" [ 2194.710706] env[62740]: _type = "Task" [ 2194.710706] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2194.717615] env[62740]: DEBUG oslo_vmware.api [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]529e1b10-8461-632d-c7b3-48330a5576bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2194.773915] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2194.774202] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2194.774398] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Deleting the datastore file [datastore2] 9a595940-16ba-401a-922f-331cf87093c9 {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2194.774663] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c06b9e20-c06d-48d6-a849-05aad8f41724 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.780642] env[62740]: DEBUG oslo_vmware.api [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Waiting for the task: (returnval){ [ 2194.780642] env[62740]: value = "task-640364" [ 2194.780642] env[62740]: _type = "Task" [ 2194.780642] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2194.788196] env[62740]: DEBUG oslo_vmware.api [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Task: {'id': task-640364, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2195.221221] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2195.221548] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Creating directory with path [datastore2] vmware_temp/a71e924c-c685-42fd-a5f4-00d1aa014ec4/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2195.221717] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6d81eb42-cf27-46e8-9bfd-30b6d75c29bf {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.232932] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Created directory with path [datastore2] vmware_temp/a71e924c-c685-42fd-a5f4-00d1aa014ec4/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2195.233136] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Fetch image to [datastore2] vmware_temp/a71e924c-c685-42fd-a5f4-00d1aa014ec4/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2195.233310] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/a71e924c-c685-42fd-a5f4-00d1aa014ec4/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2195.234029] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02451d01-79b2-4b4d-9bed-fd255fd4ec74 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.242054] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54ccfc3a-2e26-4e5a-b487-b3f4139e8ce0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.250513] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eec6e75d-8e80-475f-a7cb-697a4ece8544 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.281673] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99163eb8-b798-41e3-9340-4f0095fd011d {{(pid=62740) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.293436] env[62740]: DEBUG oslo_vmware.api [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Task: {'id': task-640364, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074858} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2195.293970] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2195.294179] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2195.294358] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2195.294537] env[62740]: INFO nova.compute.manager [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Took 0.60 seconds to destroy the instance on the hypervisor. 
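[editorial note] The traceback above shows the mechanism behind the InvalidArgument failure: Nova starts CopyVirtualDisk_Task, then oslo.vmware polls the task until it reaches a terminal state and, on 'error', translates the vCenter fault into a VimFaultException ("A specified parameter was not correct: fileType"). The following is a minimal illustrative sketch of that polling pattern, not Nova's or oslo.vmware's actual code; the TaskInfo stub and wait_for_task helper are hypothetical stand-ins for the suds objects and oslo_vmware.api internals named in the traceback.

```python
# Illustrative re-creation of the task-polling pattern in the traceback
# above. TaskInfo and wait_for_task are hypothetical stand-ins, not the
# real oslo_vmware API.
import time
from dataclasses import dataclass


class VimFaultException(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException."""
    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list


@dataclass
class TaskInfo:
    state: str             # 'queued' | 'running' | 'success' | 'error'
    error_fault: str = ""  # e.g. 'InvalidArgument'
    error_msg: str = ""


def wait_for_task(get_task_info, poll_interval=0.5):
    """Poll a task until it finishes, raising on a terminal error state."""
    while True:
        info = get_task_info()
        if info.state in ("queued", "running"):
            time.sleep(poll_interval)  # the log shows 'progress is 0%' here
            continue
        if info.state == "success":
            return info
        # Terminal error: translate the fault, as the traceback shows
        # _poll_task doing at oslo_vmware/api.py:448.
        raise VimFaultException([info.error_fault], info.error_msg)


# Simulate the failure recorded for task-640362: the copy task ends in an
# error with fault 'InvalidArgument' on the fileType parameter.
states = iter([
    TaskInfo("running"),
    TaskInfo("error", error_fault="InvalidArgument",
             error_msg="A specified parameter was not correct: fileType"),
])
try:
    wait_for_task(lambda: next(states), poll_interval=0)
except VimFaultException as exc:
    print(exc, exc.fault_list)  # -> ... fileType ['InvalidArgument']
```

Note how this explains the log ordering: the "progress is 0%" _poll_task record appears first, and roughly one poll interval later the fault is raised and surfaces in nova.compute.manager as "Instance failed to spawn".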
[ 2195.296070] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1470959d-e6b0-4958-8006-7237c23bf49c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.297980] env[62740]: DEBUG nova.compute.claims [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2195.298171] env[62740]: DEBUG oslo_concurrency.lockutils [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2195.298399] env[62740]: DEBUG oslo_concurrency.lockutils [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2195.300341] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 235793fa995b464aa7c3ff86f946bf34 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2195.321068] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2195.339153] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 235793fa995b464aa7c3ff86f946bf34 [ 2195.456599] env[62740]: DEBUG oslo_vmware.rw_handles [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a71e924c-c685-42fd-a5f4-00d1aa014ec4/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2195.514350] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f384ece1-c36e-4291-81b8-2f3d0b7f470c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.519078] env[62740]: DEBUG oslo_vmware.rw_handles [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Completed reading data from the image iterator. 
{{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2195.519311] env[62740]: DEBUG oslo_vmware.rw_handles [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a71e924c-c685-42fd-a5f4-00d1aa014ec4/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2195.523281] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a880282-76c2-4cf1-86e5-ec6fdd119303 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.552974] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dc3fd44-173b-47c5-ac71-90ba1651103c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.560150] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fba4bc00-2365-4a2a-9a21-20732be4e5da {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.573197] env[62740]: DEBUG nova.compute.provider_tree [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2195.573778] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 69908e4d4cc84773ac4acb64245e1a06 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2195.581243] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 69908e4d4cc84773ac4acb64245e1a06 [ 2195.582227] env[62740]: DEBUG nova.scheduler.client.report [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2195.584589] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 4613c7cb7e054d3fbf997f1e60f2e348 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2195.600325] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4613c7cb7e054d3fbf997f1e60f2e348 [ 2195.601035] env[62740]: DEBUG oslo_concurrency.lockutils [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb 
tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.303s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2195.601546] env[62740]: ERROR nova.compute.manager [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2195.601546] env[62740]: Faults: ['InvalidArgument'] [ 2195.601546] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] Traceback (most recent call last): [ 2195.601546] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2195.601546] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] self.driver.spawn(context, instance, image_meta, [ 2195.601546] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2195.601546] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2195.601546] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2195.601546] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] self._fetch_image_if_missing(context, vi) [ 2195.601546] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2195.601546] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] image_cache(vi, tmp_image_ds_loc) [ 2195.601546] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2195.601546] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] vm_util.copy_virtual_disk( [ 2195.601546] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2195.601546] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] session._wait_for_task(vmdk_copy_task) [ 2195.601546] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2195.601546] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] return self.wait_for_task(task_ref) [ 2195.601546] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2195.601546] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] return evt.wait() [ 2195.601546] env[62740]: ERROR 
nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2195.601546] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] result = hub.switch() [ 2195.601546] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2195.601546] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] return self.greenlet.switch() [ 2195.601546] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2195.601546] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] self.f(*self.args, **self.kw) [ 2195.601546] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2195.601546] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] raise exceptions.translate_fault(task_info.error) [ 2195.601546] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2195.601546] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] Faults: ['InvalidArgument'] [ 2195.601546] env[62740]: ERROR nova.compute.manager [instance: 9a595940-16ba-401a-922f-331cf87093c9] [ 2195.602660] env[62740]: DEBUG nova.compute.utils [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2195.603646] env[62740]: DEBUG nova.compute.manager [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Build of instance 9a595940-16ba-401a-922f-331cf87093c9 was re-scheduled: A specified parameter was not correct: fileType [ 2195.603646] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2195.604032] env[62740]: DEBUG nova.compute.manager [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2195.604211] env[62740]: DEBUG nova.compute.manager [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2195.604385] env[62740]: DEBUG nova.compute.manager [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2195.604549] env[62740]: DEBUG nova.network.neutron [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2195.930237] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 5ebb00d1c1a44b2899b57fee9a0728d4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2195.940045] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ebb00d1c1a44b2899b57fee9a0728d4 [ 2195.940625] env[62740]: DEBUG nova.network.neutron [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2195.941099] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 6cf1bbbd08d44709973d6ecf2c4074b7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2195.951419] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6cf1bbbd08d44709973d6ecf2c4074b7 [ 2195.952166] env[62740]: INFO nova.compute.manager [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Took 0.35 seconds to deallocate network for instance. 
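[editorial note] The "Updating instance_info_cache with network_info: [...]" records above and below reflect one simple operation: Nova persists the instance's current VIF list as JSON, and deallocation rewrites it as an empty list. A minimal sketch, assuming an in-memory dict in place of Nova's instance_info_cache table (the helper below is hypothetical, with UUIDs taken from this log):

```python
# Hypothetical stand-in for the instance_info_cache persistence seen in
# the update_instance_cache_with_nw_info records of this log.
import json

info_cache = {}  # in-memory substitute for the real database table


def update_instance_cache_with_nw_info(instance_uuid, network_info):
    # Nova stores the VIF list as JSON; [] means "no ports attached".
    info_cache[instance_uuid] = json.dumps(network_info)


# While its port existed, instance 2ad0d938-... cached one OVS VIF
# (abridged from the full record earlier in this section):
update_instance_cache_with_nw_info(
    "2ad0d938-304b-4535-8362-099c3a6864f6",
    [{"id": "8d9fa5ad-b708-499a-912c-8a33088f691a",
      "address": "fa:16:3e:95:3e:fa", "type": "ovs", "active": True}])

# After deallocate_for_instance() for the failed instance 9a595940-...,
# the cache is emptied, matching "network_info: []" above:
update_instance_cache_with_nw_info(
    "9a595940-16ba-401a-922f-331cf87093c9", [])

print(info_cache["9a595940-16ba-401a-922f-331cf87093c9"])  # -> []
```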
[ 2195.954086] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 20f0707e030841ea8e6492b53b3e3a70 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2195.989804] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 20f0707e030841ea8e6492b53b3e3a70 [ 2195.992491] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg d419d294191f4f03b82b59a7123574b7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2196.023053] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d419d294191f4f03b82b59a7123574b7 [ 2196.061647] env[62740]: INFO nova.scheduler.client.report [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Deleted allocations for instance 9a595940-16ba-401a-922f-331cf87093c9 [ 2196.067550] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg a74b1244302742bb998184981279fa3d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2196.082205] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a74b1244302742bb998184981279fa3d [ 2196.082773] env[62740]: DEBUG oslo_concurrency.lockutils [None req-73ab7270-0b9e-4ddd-8f00-0196511698fb tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Lock "9a595940-16ba-401a-922f-331cf87093c9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 578.927s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2196.083014] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "9a595940-16ba-401a-922f-331cf87093c9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 386.834s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2196.083212] env[62740]: INFO nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 9a595940-16ba-401a-922f-331cf87093c9] During sync_power_state the instance has a pending task (spawning). Skip. 
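[editorial note] The lockutils records around this point ("acquired ... waited 386.834s", "released ... held 578.927s") come from simple acquire/release timing around a named lock. A rough re-creation of that bookkeeping, assuming a plain threading.Lock rather than oslo.concurrency's real fair locks (the synchronized helper below is illustrative only):

```python
# Illustrative sketch of the 'waited N s' / 'held N s' lock bookkeeping
# seen in the oslo_concurrency.lockutils records of this log.
import threading
import time

_locks = {}


def synchronized(name):
    lock = _locks.setdefault(name, threading.Lock())

    class _Ctx:
        def __enter__(self):
            start = time.monotonic()
            lock.acquire()
            self.acquired = time.monotonic()
            print(f'Lock "{name}" acquired :: '
                  f'waited {self.acquired - start:.3f}s')
            return self

        def __exit__(self, *exc):
            held = time.monotonic() - self.acquired
            lock.release()
            print(f'Lock "{name}" released :: held {held:.3f}s')

    return _Ctx()


# e.g. the per-instance lock serializing build, power-state sync and
# terminate for instance 9a595940-... in the records around this note:
with synchronized("9a595940-16ba-401a-922f-331cf87093c9"):
    time.sleep(0.01)  # critical section, e.g. do_terminate_instance
```

The long "waited" times in the log follow directly from this serialization: _sync_power_states and do_terminate_instance both blocked on the instance lock while _locked_do_build_and_run_instance held it for the full 578.927s build attempt.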
[ 2196.083393] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "9a595940-16ba-401a-922f-331cf87093c9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2196.083626] env[62740]: DEBUG oslo_concurrency.lockutils [None req-304ede8b-0aab-46a0-86f4-3703dbe5962c tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Lock "9a595940-16ba-401a-922f-331cf87093c9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 382.906s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2196.083847] env[62740]: DEBUG oslo_concurrency.lockutils [None req-304ede8b-0aab-46a0-86f4-3703dbe5962c tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Acquiring lock "9a595940-16ba-401a-922f-331cf87093c9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2196.084098] env[62740]: DEBUG oslo_concurrency.lockutils [None req-304ede8b-0aab-46a0-86f4-3703dbe5962c tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Lock "9a595940-16ba-401a-922f-331cf87093c9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2196.084276] env[62740]: DEBUG oslo_concurrency.lockutils [None req-304ede8b-0aab-46a0-86f4-3703dbe5962c tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Lock "9a595940-16ba-401a-922f-331cf87093c9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2196.086280] env[62740]: INFO nova.compute.manager [None req-304ede8b-0aab-46a0-86f4-3703dbe5962c tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Terminating instance [ 2196.087968] env[62740]: DEBUG nova.compute.manager [None req-304ede8b-0aab-46a0-86f4-3703dbe5962c tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Start destroying the instance on the hypervisor. 
{{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2196.088275] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-304ede8b-0aab-46a0-86f4-3703dbe5962c tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2196.088816] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-326e0673-91f8-4261-ae07-bf26882ca3f6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2196.098511] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5743cc9-f93f-454a-ad0d-d36ffc5d4b20 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2196.125333] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-304ede8b-0aab-46a0-86f4-3703dbe5962c tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9a595940-16ba-401a-922f-331cf87093c9 could not be found. [ 2196.125569] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-304ede8b-0aab-46a0-86f4-3703dbe5962c tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2196.125789] env[62740]: INFO nova.compute.manager [None req-304ede8b-0aab-46a0-86f4-3703dbe5962c tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2196.126088] env[62740]: DEBUG oslo.service.loopingcall [None req-304ede8b-0aab-46a0-86f4-3703dbe5962c tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2196.126358] env[62740]: DEBUG nova.compute.manager [-] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2196.126485] env[62740]: DEBUG nova.network.neutron [-] [instance: 9a595940-16ba-401a-922f-331cf87093c9] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2196.144627] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 47db70e5314c4a29ad19348e50df330d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2196.151586] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 47db70e5314c4a29ad19348e50df330d [ 2196.152481] env[62740]: DEBUG nova.network.neutron [-] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2196.152481] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 3b92f9150d2546a7b40f66cb46f3edde in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2196.160115] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3b92f9150d2546a7b40f66cb46f3edde [ 2196.160556] env[62740]: INFO nova.compute.manager [-] [instance: 9a595940-16ba-401a-922f-331cf87093c9] Took 0.03 seconds to deallocate network for instance. [ 2196.164045] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-304ede8b-0aab-46a0-86f4-3703dbe5962c tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 0689b6cce3c744e496471afe99589b00 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2196.190516] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0689b6cce3c744e496471afe99589b00 [ 2196.204470] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-304ede8b-0aab-46a0-86f4-3703dbe5962c tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 3958e1d455bb428eb74468381b67e4db in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2196.245262] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3958e1d455bb428eb74468381b67e4db [ 2196.247889] env[62740]: DEBUG oslo_concurrency.lockutils [None req-304ede8b-0aab-46a0-86f4-3703dbe5962c tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Lock "9a595940-16ba-401a-922f-331cf87093c9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.164s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2196.248248] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-304ede8b-0aab-46a0-86f4-3703dbe5962c tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 6a89b7f035f14d5ca92b48ebc3d1b85d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2196.259286] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6a89b7f035f14d5ca92b48ebc3d1b85d [ 2206.112398] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-24552417-118e-4acb-93eb-4a2b9765a445 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting 
reply to msg 69bd4348436e4eed98e75094afde6753 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2206.123849] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 69bd4348436e4eed98e75094afde6753 [ 2206.124390] env[62740]: DEBUG oslo_concurrency.lockutils [None req-24552417-118e-4acb-93eb-4a2b9765a445 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Acquiring lock "b75490e6-ded3-4aa7-89ff-f4963fe82cfe" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2210.203638] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 900bc60c1cd2404ebbecde0649ea64d2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2210.212612] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 900bc60c1cd2404ebbecde0649ea64d2 [ 2220.394733] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Acquiring lock "a925b4b2-7320-4c28-b083-c15adf060a00" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2220.395139] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Lock "a925b4b2-7320-4c28-b083-c15adf060a00" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2220.395428] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Expecting reply to msg d5579b9fe25f4c748c56129b508de590 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2220.405679] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d5579b9fe25f4c748c56129b508de590 [ 2220.406163] env[62740]: DEBUG nova.compute.manager [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] [instance: a925b4b2-7320-4c28-b083-c15adf060a00] Starting instance... 
{{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2220.407885] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Expecting reply to msg 55ab0e0791d44f2eb51c52cb3c40a3eb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2220.437854] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 55ab0e0791d44f2eb51c52cb3c40a3eb [ 2220.452695] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2220.452934] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2220.454563] env[62740]: INFO nova.compute.claims [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] [instance: a925b4b2-7320-4c28-b083-c15adf060a00] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2220.456277] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Expecting reply to msg 460ed85f6e854caf91e50d270c170817 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2220.486782] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 460ed85f6e854caf91e50d270c170817 [ 2220.488436] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Expecting reply to msg e1d83ec7a3cd4b75b1ba276891eae5bd in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2220.495697] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e1d83ec7a3cd4b75b1ba276891eae5bd [ 2220.628997] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d13e7af2-364c-4e08-9edf-1470b9a2ce21 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.637123] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2c90ea7-587d-44ff-8385-5b3900f515d6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.668274] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3486dbfc-e95b-420e-b772-ee6753180d74 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.675472] env[62740]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc782a16-b203-4721-bbf9-6a7a3817df4d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.688178] env[62740]: DEBUG nova.compute.provider_tree [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2220.688666] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Expecting reply to msg d6fbb54396864cc1978ebec17d0ec0c3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2220.696476] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d6fbb54396864cc1978ebec17d0ec0c3 [ 2220.697382] env[62740]: DEBUG nova.scheduler.client.report [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2220.699622] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Expecting reply to msg 732fe94677b6400fa3bed0b2fd2639ee in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2220.711326] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 732fe94677b6400fa3bed0b2fd2639ee [ 2220.711987] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.259s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2220.712466] env[62740]: DEBUG nova.compute.manager [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] [instance: a925b4b2-7320-4c28-b083-c15adf060a00] Start building networks asynchronously for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2220.714018] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Expecting reply to msg 8311bc90baa44449be259d550b296bf3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2220.746735] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8311bc90baa44449be259d550b296bf3 [ 2220.748259] env[62740]: DEBUG nova.compute.utils [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2220.749063] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Expecting reply to msg 7a0b9537f3d8494d82cd70ff3822584b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2220.749908] env[62740]: DEBUG nova.compute.manager [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] [instance: a925b4b2-7320-4c28-b083-c15adf060a00] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2220.750105] env[62740]: DEBUG nova.network.neutron [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] [instance: a925b4b2-7320-4c28-b083-c15adf060a00] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2220.758595] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7a0b9537f3d8494d82cd70ff3822584b [ 2220.759499] env[62740]: DEBUG nova.compute.manager [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] [instance: a925b4b2-7320-4c28-b083-c15adf060a00] Start building block device mappings for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2220.760764] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Expecting reply to msg 49a5e472c85b477fbb2d0fd215327f3c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2220.790886] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 49a5e472c85b477fbb2d0fd215327f3c [ 2220.794048] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Expecting reply to msg 3349e2ae50b4457b828a1a2e351269ea in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2220.825253] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3349e2ae50b4457b828a1a2e351269ea [ 2220.826411] env[62740]: DEBUG nova.compute.manager [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] [instance: a925b4b2-7320-4c28-b083-c15adf060a00] Start spawning the instance on the hypervisor. {{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2220.836995] env[62740]: DEBUG nova.policy [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ac6d770baf2b41d5a2cdb5a2b3aea914', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '985754fdf1b944f989aa8b82f4b80026', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 2220.861889] env[62740]: DEBUG nova.virt.hardware [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2220.862310] env[62740]: DEBUG nova.virt.hardware [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:348}} [ 2220.862310] env[62740]: DEBUG nova.virt.hardware [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2220.862479] env[62740]: DEBUG nova.virt.hardware [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2220.862627] env[62740]: DEBUG nova.virt.hardware [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2220.862775] env[62740]: DEBUG nova.virt.hardware [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2220.862982] env[62740]: DEBUG nova.virt.hardware [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2220.863160] env[62740]: DEBUG nova.virt.hardware [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2220.863326] env[62740]: DEBUG nova.virt.hardware [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2220.863490] env[62740]: DEBUG nova.virt.hardware [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2220.863659] env[62740]: DEBUG nova.virt.hardware [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2220.864775] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e31b3c3-f282-44be-b1ef-972e45b5f5cd {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.872813] env[62740]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64679b93-8311-4edf-bf2c-cadb31f85d76 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.242665] env[62740]: DEBUG nova.network.neutron [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] [instance: a925b4b2-7320-4c28-b083-c15adf060a00] Successfully created port: c2a22897-d6f9-48db-8b50-a4ea1f67bc33 {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2221.803427] env[62740]: DEBUG nova.compute.manager [req-3bb7472a-c198-44ab-b52b-ed4445d4be0c req-a325c8d3-b920-43c7-b3ef-3794ed6fea46 service nova] [instance: a925b4b2-7320-4c28-b083-c15adf060a00] Received event network-vif-plugged-c2a22897-d6f9-48db-8b50-a4ea1f67bc33 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 2221.803689] env[62740]: DEBUG oslo_concurrency.lockutils [req-3bb7472a-c198-44ab-b52b-ed4445d4be0c req-a325c8d3-b920-43c7-b3ef-3794ed6fea46 service nova] Acquiring lock "a925b4b2-7320-4c28-b083-c15adf060a00-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2221.803870] env[62740]: DEBUG oslo_concurrency.lockutils [req-3bb7472a-c198-44ab-b52b-ed4445d4be0c req-a325c8d3-b920-43c7-b3ef-3794ed6fea46 service nova] Lock "a925b4b2-7320-4c28-b083-c15adf060a00-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2221.804056] env[62740]: DEBUG oslo_concurrency.lockutils [req-3bb7472a-c198-44ab-b52b-ed4445d4be0c req-a325c8d3-b920-43c7-b3ef-3794ed6fea46 service nova] Lock "a925b4b2-7320-4c28-b083-c15adf060a00-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2221.804238] env[62740]: DEBUG nova.compute.manager [req-3bb7472a-c198-44ab-b52b-ed4445d4be0c req-a325c8d3-b920-43c7-b3ef-3794ed6fea46 service nova] [instance: a925b4b2-7320-4c28-b083-c15adf060a00] No waiting events found dispatching network-vif-plugged-c2a22897-d6f9-48db-8b50-a4ea1f67bc33 {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2221.804392] env[62740]: WARNING nova.compute.manager [req-3bb7472a-c198-44ab-b52b-ed4445d4be0c req-a325c8d3-b920-43c7-b3ef-3794ed6fea46 service nova] [instance: a925b4b2-7320-4c28-b083-c15adf060a00] Received unexpected event network-vif-plugged-c2a22897-d6f9-48db-8b50-a4ea1f67bc33 for instance with vm_state building and task_state spawning. 
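The WARNING just above is the fallback path of Nova's external-event handshake: Neutron reports network-vif-plugged for port c2a22897-d6f9-48db-8b50-a4ea1f67bc33, but no thread has registered a waiter for that (instance, event) pair yet, so the event is logged and dropped instead of dispatched. Below is a minimal, self-contained sketch of that waiter-registry pattern; the class and function names are illustrative stand-ins, not Nova's actual nova.compute.manager.InstanceEvents API.

    import threading
    from collections import defaultdict

    class InstanceEvents:
        # registry of waiters keyed by (instance_uuid, event_name);
        # illustrative stand-in, not Nova's real class
        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = defaultdict(dict)

        def prepare_for_event(self, instance_uuid, event_name):
            # called by the spawning thread *before* the action that
            # triggers the event (e.g. plugging the VIF)
            ev = threading.Event()
            with self._lock:
                self._waiters[instance_uuid][event_name] = ev
            return ev

        def pop_event(self, instance_uuid, event_name):
            # remove and return the waiter, or None if nobody is waiting
            with self._lock:
                return self._waiters.get(instance_uuid, {}).pop(event_name, None)

    def external_instance_event(registry, instance_uuid, event_name):
        # RPC-side handler: wake the waiter if one exists, otherwise
        # emit the "unexpected event" warning seen in the log above
        ev = registry.pop_event(instance_uuid, event_name)
        if ev is None:
            print("WARNING: unexpected event %s for instance %s"
                  % (event_name, instance_uuid))
        else:
            ev.set()

    # usage: the spawning side registers first, then blocks on wait()
    registry = InstanceEvents()
    waiter = registry.prepare_for_event("a925b4b2", "network-vif-plugged")
    external_instance_event(registry, "a925b4b2", "network-vif-plugged")
    assert waiter.wait(timeout=1.0)  # True: the event was delivered

In the log, the warning fires precisely because the event handler ran before the spawn path registered its waiter, which is harmless here: the instance is still in vm_state building / task_state spawning, and the port update is picked up later via the refresh_cache path.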
[ 2221.860426] env[62740]: DEBUG nova.network.neutron [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] [instance: a925b4b2-7320-4c28-b083-c15adf060a00] Successfully updated port: c2a22897-d6f9-48db-8b50-a4ea1f67bc33 {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2221.860898] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Expecting reply to msg fcca88c07dc9450aa41fe5261673fba9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2221.872273] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fcca88c07dc9450aa41fe5261673fba9 [ 2221.872903] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Acquiring lock "refresh_cache-a925b4b2-7320-4c28-b083-c15adf060a00" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2221.873052] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Acquired lock "refresh_cache-a925b4b2-7320-4c28-b083-c15adf060a00" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2221.873203] env[62740]: DEBUG nova.network.neutron [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] [instance: a925b4b2-7320-4c28-b083-c15adf060a00] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2221.873575] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Expecting reply to msg 0fd04b1824ec4f1293ade2ded067ad92 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2221.881426] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0fd04b1824ec4f1293ade2ded067ad92 [ 2221.922090] env[62740]: DEBUG nova.network.neutron [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] [instance: a925b4b2-7320-4c28-b083-c15adf060a00] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2222.083674] env[62740]: DEBUG nova.network.neutron [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] [instance: a925b4b2-7320-4c28-b083-c15adf060a00] Updating instance_info_cache with network_info: [{"id": "c2a22897-d6f9-48db-8b50-a4ea1f67bc33", "address": "fa:16:3e:32:72:29", "network": {"id": "5fa44336-54a0-48aa-8980-65125eab43cd", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-396453820-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "985754fdf1b944f989aa8b82f4b80026", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2a22897-d6", "ovs_interfaceid": "c2a22897-d6f9-48db-8b50-a4ea1f67bc33", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2222.084224] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Expecting reply to msg 6208740b56644519afef81510fa4e0c7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2222.094191] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6208740b56644519afef81510fa4e0c7 [ 2222.094730] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Releasing lock "refresh_cache-a925b4b2-7320-4c28-b083-c15adf060a00" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2222.094991] env[62740]: DEBUG nova.compute.manager [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] [instance: a925b4b2-7320-4c28-b083-c15adf060a00] Instance network_info: |[{"id": "c2a22897-d6f9-48db-8b50-a4ea1f67bc33", "address": "fa:16:3e:32:72:29", "network": {"id": "5fa44336-54a0-48aa-8980-65125eab43cd", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-396453820-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "985754fdf1b944f989aa8b82f4b80026", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": 
"ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2a22897-d6", "ovs_interfaceid": "c2a22897-d6f9-48db-8b50-a4ea1f67bc33", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2222.095422] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] [instance: a925b4b2-7320-4c28-b083-c15adf060a00] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:32:72:29', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd4020f51-6e46-4b73-a79e-9fe3fd51b917', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c2a22897-d6f9-48db-8b50-a4ea1f67bc33', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2222.103187] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Creating folder: Project (985754fdf1b944f989aa8b82f4b80026). Parent ref: group-v156037. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2222.103658] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ac97434e-ebd2-4b5c-b84d-04034fa845e1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.113839] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Created folder: Project (985754fdf1b944f989aa8b82f4b80026) in parent group-v156037. [ 2222.114031] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Creating folder: Instances. Parent ref: group-v156190. {{(pid=62740) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2222.114240] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2b28a36b-cf31-4aed-83a3-c43e0c337c55 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.123855] env[62740]: INFO nova.virt.vmwareapi.vm_util [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Created folder: Instances in parent group-v156190. [ 2222.124052] env[62740]: DEBUG oslo.service.loopingcall [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2222.124227] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a925b4b2-7320-4c28-b083-c15adf060a00] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2222.124410] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e801551c-fccf-4c04-958a-f37926d61238 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.142660] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2222.142660] env[62740]: value = "task-640367" [ 2222.142660] env[62740]: _type = "Task" [ 2222.142660] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2222.154144] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640367, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2222.652496] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640367, 'name': CreateVM_Task, 'duration_secs': 0.288936} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2222.652727] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a925b4b2-7320-4c28-b083-c15adf060a00] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2222.653564] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2222.653784] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2222.654130] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2222.654377] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8cc2ba64-3a09-4c6a-8577-fa68d66d716a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.658497] env[62740]: DEBUG oslo_vmware.api [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Waiting for the task: (returnval){ [ 2222.658497] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]520a6b14-a2bd-03d6-d600-cbbe20187218" [ 2222.658497] env[62740]: _type = "Task" [ 
2222.658497] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2222.665614] env[62740]: DEBUG oslo_vmware.api [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]520a6b14-a2bd-03d6-d600-cbbe20187218, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2223.168409] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2223.168760] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] [instance: a925b4b2-7320-4c28-b083-c15adf060a00] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2223.168933] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9757aa52-37cc-44f5-be64-97f66cbadd05 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2223.830790] env[62740]: DEBUG nova.compute.manager [req-ca670b71-f728-4ad9-be4e-8e14bd95dc8b req-45bf3d26-a01f-4912-8865-47c2ef088d3d service nova] [instance: a925b4b2-7320-4c28-b083-c15adf060a00] Received event network-changed-c2a22897-d6f9-48db-8b50-a4ea1f67bc33 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 2223.831007] env[62740]: DEBUG nova.compute.manager [req-ca670b71-f728-4ad9-be4e-8e14bd95dc8b req-45bf3d26-a01f-4912-8865-47c2ef088d3d service nova] [instance: a925b4b2-7320-4c28-b083-c15adf060a00] Refreshing instance network info cache due to event network-changed-c2a22897-d6f9-48db-8b50-a4ea1f67bc33. 
{{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 2223.831240] env[62740]: DEBUG oslo_concurrency.lockutils [req-ca670b71-f728-4ad9-be4e-8e14bd95dc8b req-45bf3d26-a01f-4912-8865-47c2ef088d3d service nova] Acquiring lock "refresh_cache-a925b4b2-7320-4c28-b083-c15adf060a00" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2223.831381] env[62740]: DEBUG oslo_concurrency.lockutils [req-ca670b71-f728-4ad9-be4e-8e14bd95dc8b req-45bf3d26-a01f-4912-8865-47c2ef088d3d service nova] Acquired lock "refresh_cache-a925b4b2-7320-4c28-b083-c15adf060a00" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2223.831544] env[62740]: DEBUG nova.network.neutron [req-ca670b71-f728-4ad9-be4e-8e14bd95dc8b req-45bf3d26-a01f-4912-8865-47c2ef088d3d service nova] [instance: a925b4b2-7320-4c28-b083-c15adf060a00] Refreshing network info cache for port c2a22897-d6f9-48db-8b50-a4ea1f67bc33 {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2223.832031] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-ca670b71-f728-4ad9-be4e-8e14bd95dc8b req-45bf3d26-a01f-4912-8865-47c2ef088d3d service nova] Expecting reply to msg f3c37618345b49acbd0a19b9e5583359 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2223.839452] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f3c37618345b49acbd0a19b9e5583359 [ 2224.092482] env[62740]: DEBUG nova.network.neutron [req-ca670b71-f728-4ad9-be4e-8e14bd95dc8b req-45bf3d26-a01f-4912-8865-47c2ef088d3d service nova] [instance: a925b4b2-7320-4c28-b083-c15adf060a00] Updated VIF entry in instance network info cache for port c2a22897-d6f9-48db-8b50-a4ea1f67bc33. 
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2224.092838] env[62740]: DEBUG nova.network.neutron [req-ca670b71-f728-4ad9-be4e-8e14bd95dc8b req-45bf3d26-a01f-4912-8865-47c2ef088d3d service nova] [instance: a925b4b2-7320-4c28-b083-c15adf060a00] Updating instance_info_cache with network_info: [{"id": "c2a22897-d6f9-48db-8b50-a4ea1f67bc33", "address": "fa:16:3e:32:72:29", "network": {"id": "5fa44336-54a0-48aa-8980-65125eab43cd", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-396453820-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "985754fdf1b944f989aa8b82f4b80026", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2a22897-d6", "ovs_interfaceid": "c2a22897-d6f9-48db-8b50-a4ea1f67bc33", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2224.093368] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-ca670b71-f728-4ad9-be4e-8e14bd95dc8b req-45bf3d26-a01f-4912-8865-47c2ef088d3d service nova] Expecting reply to msg 74003917b055432386ba57186fd704c4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2224.101953] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 74003917b055432386ba57186fd704c4 [ 2224.102521] env[62740]: DEBUG oslo_concurrency.lockutils [req-ca670b71-f728-4ad9-be4e-8e14bd95dc8b req-45bf3d26-a01f-4912-8865-47c2ef088d3d service nova] Releasing lock "refresh_cache-a925b4b2-7320-4c28-b083-c15adf060a00" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2226.887027] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2226.889729] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2227.890588] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2227.890867] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Starting heal instance info cache {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 2227.890902] env[62740]: DEBUG nova.compute.manager [None 
req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Rebuilding the list of instances to heal {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 2227.891674] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 101b868b393848049c814c6aba3acc6c in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2227.911747] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 101b868b393848049c814c6aba3acc6c [ 2227.914007] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2227.914185] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2227.914320] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2227.914447] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2227.914571] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2227.914692] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2227.914811] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2227.914933] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 0f438d9b-394a-465c-97ae-8393bdc3e1cd] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2227.915063] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 2ad0d938-304b-4535-8362-099c3a6864f6] Skipping network cache update for instance because it is Building. 
{{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2227.915184] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: a925b4b2-7320-4c28-b083-c15adf060a00] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2227.915344] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Didn't find any instances for network info cache update. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 2227.915814] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2227.916008] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2227.916147] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62740) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 2228.890579] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager.update_available_resource {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2228.890931] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg f4fa40deba824dd891f3d0ef26dff047 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2228.901020] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f4fa40deba824dd891f3d0ef26dff047 [ 2228.902018] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2228.902235] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2228.902408] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2228.902563] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62740) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2228.903643] env[62740]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20a2f2d8-926b-40ec-8a10-f1e4ead17774 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.912188] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcc38729-6e2d-42f7-a2b6-9a78127f852f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.927270] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cf5cb88-a015-432b-ac67-2edec8f7715a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.933513] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4255a357-9fc6-4f2e-98b3-cd6be0e5684e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.961979] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181687MB free_disk=90GB free_vcpus=48 pci_devices=None {{(pid=62740) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2228.962141] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2228.962469] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2228.963153] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg ced5ace3712b4511a7be27f2c08340c6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2228.997190] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ced5ace3712b4511a7be27f2c08340c6 [ 2229.001246] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 692d1a246eb849948acb889649d1ff02 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2229.010877] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 692d1a246eb849948acb889649d1ff02 [ 2229.077088] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance d2fb90b7-1618-4f07-8854-81566887a7cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2229.077261] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 19f27c16-45b3-47d8-acf0-18255844431f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2229.077392] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 4ee71d81-7d8b-42f8-a27c-b4645169fa3e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2229.077533] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2229.077669] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 1b975b29-fbaa-4385-9bf9-33496b4ed129 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2229.077753] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance b75490e6-ded3-4aa7-89ff-f4963fe82cfe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2229.077874] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2229.077996] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 0f438d9b-394a-465c-97ae-8393bdc3e1cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2229.078131] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 2ad0d938-304b-4535-8362-099c3a6864f6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2229.078239] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance a925b4b2-7320-4c28-b083-c15adf060a00 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2229.078437] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2229.078577] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2229.190012] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7865c40a-7418-4cf9-a26e-f7c5ab520281 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.198314] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba18cb0c-655c-4459-96d8-6157f91621c2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.228095] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e29b1a76-7989-4d55-85c0-068301ba8e75 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.235075] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d786ab4-4dd0-48b5-bc3f-17341bb08ce1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.248143] env[62740]: DEBUG nova.compute.provider_tree [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2229.248542] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg aa2024a479404eec93adeb528c39598b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2229.255791] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa2024a479404eec93adeb528c39598b [ 2229.256654] env[62740]: DEBUG nova.scheduler.client.report [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2229.258848] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 6c4d1f00f99542bca5710b73a7784876 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2229.269030] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c4d1f00f99542bca5710b73a7784876 [ 2229.269670] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62740) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2229.269875] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.308s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2230.270250] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2230.891275] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2233.886537] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2233.887272] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 599bd3e0c37b49f1b37fec9399931a4b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2233.904462] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 599bd3e0c37b49f1b37fec9399931a4b [ 2234.891146] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2244.185552] env[62740]: WARNING oslo_vmware.rw_handles [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2244.185552] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2244.185552] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2244.185552] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2244.185552] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2244.185552] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 2244.185552] env[62740]: ERROR oslo_vmware.rw_handles File 
"/usr/lib/python3.10/http/client.py", line 318, in begin [ 2244.185552] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2244.185552] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2244.185552] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2244.185552] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2244.185552] env[62740]: ERROR oslo_vmware.rw_handles [ 2244.186174] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/a71e924c-c685-42fd-a5f4-00d1aa014ec4/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2244.188532] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2244.188805] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Copying Virtual Disk [datastore2] vmware_temp/a71e924c-c685-42fd-a5f4-00d1aa014ec4/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore2] vmware_temp/a71e924c-c685-42fd-a5f4-00d1aa014ec4/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2244.189163] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3d810d89-ece4-45d9-9676-f7422816627e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.197130] env[62740]: DEBUG oslo_vmware.api [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Waiting for the task: (returnval){ [ 2244.197130] env[62740]: value = "task-640368" [ 2244.197130] env[62740]: _type = "Task" [ 2244.197130] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2244.204977] env[62740]: DEBUG oslo_vmware.api [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Task: {'id': task-640368, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2244.707445] env[62740]: DEBUG oslo_vmware.exceptions [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Fault InvalidArgument not matched. 
{{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2244.707727] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2244.708307] env[62740]: ERROR nova.compute.manager [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2244.708307] env[62740]: Faults: ['InvalidArgument'] [ 2244.708307] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Traceback (most recent call last): [ 2244.708307] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2244.708307] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] yield resources [ 2244.708307] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2244.708307] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] self.driver.spawn(context, instance, image_meta, [ 2244.708307] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2244.708307] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2244.708307] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2244.708307] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] self._fetch_image_if_missing(context, vi) [ 2244.708307] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2244.708307] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] image_cache(vi, tmp_image_ds_loc) [ 2244.708307] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2244.708307] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] vm_util.copy_virtual_disk( [ 2244.708307] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2244.708307] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] session._wait_for_task(vmdk_copy_task) [ 2244.708307] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task 
[ 2244.708307] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] return self.wait_for_task(task_ref) [ 2244.708307] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2244.708307] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] return evt.wait() [ 2244.708307] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2244.708307] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] result = hub.switch() [ 2244.708307] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2244.708307] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] return self.greenlet.switch() [ 2244.708307] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2244.708307] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] self.f(*self.args, **self.kw) [ 2244.708307] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2244.708307] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] raise exceptions.translate_fault(task_info.error) [ 2244.708307] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2244.708307] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Faults: ['InvalidArgument'] [ 2244.708307] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] [ 2244.709332] env[62740]: INFO nova.compute.manager [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Terminating instance [ 2244.710271] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2244.710477] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2244.710712] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-03a5aa61-26a8-4835-9747-7acb96898700 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
2244.714752] env[62740]: DEBUG nova.compute.manager [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2244.714752] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2244.714903] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee722e43-4020-4885-80ee-8ebb80466a35 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.721643] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2244.721860] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-24d37eb6-0d71-4fbf-9e9c-f2e5b67012e9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.724021] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2244.724214] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2244.725178] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad1e493e-e233-411a-8c2a-c5409aef07f8 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.730027] env[62740]: DEBUG oslo_vmware.api [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Waiting for the task: (returnval){ [ 2244.730027] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52aeea66-7018-65cf-b54e-ef3d61e8cc35" [ 2244.730027] env[62740]: _type = "Task" [ 2244.730027] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2244.737015] env[62740]: DEBUG oslo_vmware.api [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52aeea66-7018-65cf-b54e-ef3d61e8cc35, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2244.797704] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2244.797929] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2244.798119] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Deleting the datastore file [datastore2] d2fb90b7-1618-4f07-8854-81566887a7cd {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2244.798392] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9e5f442a-5b91-4d62-9ed8-8f13b4290609 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.804804] env[62740]: DEBUG oslo_vmware.api [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Waiting for the task: (returnval){ [ 2244.804804] env[62740]: value = "task-640370" [ 2244.804804] env[62740]: _type = "Task" [ 2244.804804] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2244.811946] env[62740]: DEBUG oslo_vmware.api [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Task: {'id': task-640370, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2245.240729] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2245.241045] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Creating directory with path [datastore2] vmware_temp/809e4771-5521-4641-ab55-1f065f7783ac/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2245.241233] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7438e5bd-daa5-4abb-8923-7cdeee724f48 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.252526] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Created directory with path [datastore2] vmware_temp/809e4771-5521-4641-ab55-1f065f7783ac/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2245.252714] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Fetch image to [datastore2] vmware_temp/809e4771-5521-4641-ab55-1f065f7783ac/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2245.252889] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/809e4771-5521-4641-ab55-1f065f7783ac/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2245.253669] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbf20596-6c6d-44d3-80d6-504dbcf25687 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.259838] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aa1331e-3ac0-453c-8411-137038408bb1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.269646] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db12fd00-8d09-4885-ac68-e8dad8f96d8d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.299326] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99c5f750-8155-4023-96d7-4c9074b09f8d {{(pid=62740) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.304721] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-0fef5f2c-341d-4a48-a3bf-318e0e912d02 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.314203] env[62740]: DEBUG oslo_vmware.api [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Task: {'id': task-640370, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.066336} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2245.314420] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2245.314600] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2245.314798] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2245.315048] env[62740]: INFO nova.compute.manager [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2245.317100] env[62740]: DEBUG nova.compute.claims [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2245.317273] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2245.317503] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2245.319499] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 876773885ec34fd4a1a94845641bd83f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2245.329994] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2245.369029] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 876773885ec34fd4a1a94845641bd83f [ 2245.401473] env[62740]: DEBUG oslo_vmware.rw_handles [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/809e4771-5521-4641-ab55-1f065f7783ac/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2245.463795] env[62740]: DEBUG oslo_vmware.rw_handles [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Completed reading data from the image iterator. {{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2245.463983] env[62740]: DEBUG oslo_vmware.rw_handles [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/809e4771-5521-4641-ab55-1f065f7783ac/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2245.549067] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52058ae9-16cf-46ac-b4c2-14c55ba530da {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.556487] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b35177b-5380-448f-b584-0d534c9707b3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.587310] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34ecf1ce-a063-49a2-b8f2-900d05597f2d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.594853] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d738cdb-cd50-475c-9406-0f9db53096f1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.609459] env[62740]: DEBUG nova.compute.provider_tree [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2245.610230] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg cc74a8a79181451cb76ff2a407a9bb0a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2245.618101] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cc74a8a79181451cb76ff2a407a9bb0a [ 2245.619009] env[62740]: DEBUG nova.scheduler.client.report [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2245.621363] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg f9d2cb21f91543f6a8434370e21aeffe in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2245.631536] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f9d2cb21f91543f6a8434370e21aeffe [ 2245.632197] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.315s {{(pid=62740) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2245.632775] env[62740]: ERROR nova.compute.manager [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2245.632775] env[62740]: Faults: ['InvalidArgument'] [ 2245.632775] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Traceback (most recent call last): [ 2245.632775] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2245.632775] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] self.driver.spawn(context, instance, image_meta, [ 2245.632775] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2245.632775] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2245.632775] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2245.632775] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] self._fetch_image_if_missing(context, vi) [ 2245.632775] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2245.632775] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] image_cache(vi, tmp_image_ds_loc) [ 2245.632775] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2245.632775] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] vm_util.copy_virtual_disk( [ 2245.632775] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2245.632775] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] session._wait_for_task(vmdk_copy_task) [ 2245.632775] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2245.632775] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] return self.wait_for_task(task_ref) [ 2245.632775] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2245.632775] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] return evt.wait() [ 2245.632775] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2245.632775] env[62740]: ERROR nova.compute.manager [instance: 
d2fb90b7-1618-4f07-8854-81566887a7cd] result = hub.switch() [ 2245.632775] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2245.632775] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] return self.greenlet.switch() [ 2245.632775] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2245.632775] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] self.f(*self.args, **self.kw) [ 2245.632775] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2245.632775] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] raise exceptions.translate_fault(task_info.error) [ 2245.632775] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2245.632775] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Faults: ['InvalidArgument'] [ 2245.632775] env[62740]: ERROR nova.compute.manager [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] [ 2245.633608] env[62740]: DEBUG nova.compute.utils [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2245.634843] env[62740]: DEBUG nova.compute.manager [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Build of instance d2fb90b7-1618-4f07-8854-81566887a7cd was re-scheduled: A specified parameter was not correct: fileType [ 2245.634843] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2245.635234] env[62740]: DEBUG nova.compute.manager [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2245.635411] env[62740]: DEBUG nova.compute.manager [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2245.635582] env[62740]: DEBUG nova.compute.manager [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2245.635759] env[62740]: DEBUG nova.network.neutron [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2245.926699] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 319ee092fa754bd785b86fc6584602df in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2245.937963] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 319ee092fa754bd785b86fc6584602df [ 2245.938627] env[62740]: DEBUG nova.network.neutron [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2245.939186] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 00360cddb242498699c5b3bd953c9d53 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2245.949066] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 00360cddb242498699c5b3bd953c9d53 [ 2245.949645] env[62740]: INFO nova.compute.manager [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Took 0.31 seconds to deallocate network for instance. 
[ 2245.951396] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg b107d3ecf6d94ed7bba03560b76f577e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2245.984136] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b107d3ecf6d94ed7bba03560b76f577e [ 2245.987133] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg d703a5fe8e2b4ec1a6165ac4596453d7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2246.016831] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d703a5fe8e2b4ec1a6165ac4596453d7 [ 2246.043021] env[62740]: INFO nova.scheduler.client.report [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Deleted allocations for instance d2fb90b7-1618-4f07-8854-81566887a7cd [ 2246.049148] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 19b543140bea4eada3ba4bb75961bff5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2246.064514] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 19b543140bea4eada3ba4bb75961bff5 [ 2246.065115] env[62740]: DEBUG oslo_concurrency.lockutils [None req-5bd4da85-a83f-47dd-b883-86a02df641e0 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Lock "d2fb90b7-1618-4f07-8854-81566887a7cd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 627.864s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2246.065351] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "d2fb90b7-1618-4f07-8854-81566887a7cd" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 436.816s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2246.065536] env[62740]: INFO nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 2246.065701] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "d2fb90b7-1618-4f07-8854-81566887a7cd" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2246.065947] env[62740]: DEBUG oslo_concurrency.lockutils [None req-84948cae-18f3-4f57-bc24-d27515fac153 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Lock "d2fb90b7-1618-4f07-8854-81566887a7cd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 431.939s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2246.066191] env[62740]: DEBUG oslo_concurrency.lockutils [None req-84948cae-18f3-4f57-bc24-d27515fac153 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Acquiring lock "d2fb90b7-1618-4f07-8854-81566887a7cd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2246.066396] env[62740]: DEBUG oslo_concurrency.lockutils [None req-84948cae-18f3-4f57-bc24-d27515fac153 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Lock "d2fb90b7-1618-4f07-8854-81566887a7cd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2246.066561] env[62740]: DEBUG oslo_concurrency.lockutils [None req-84948cae-18f3-4f57-bc24-d27515fac153 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Lock "d2fb90b7-1618-4f07-8854-81566887a7cd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2246.068609] env[62740]: INFO nova.compute.manager [None req-84948cae-18f3-4f57-bc24-d27515fac153 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Terminating instance [ 2246.070454] env[62740]: DEBUG nova.compute.manager [None req-84948cae-18f3-4f57-bc24-d27515fac153 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Start destroying the instance on the hypervisor. 
{{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2246.070675] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-84948cae-18f3-4f57-bc24-d27515fac153 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2246.071227] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a7bfd0ae-c4f6-457b-87ad-533d0fd138b6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.081334] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-092bf1c9-e4c4-48b1-9ba0-048ca7c13106 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.109095] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-84948cae-18f3-4f57-bc24-d27515fac153 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d2fb90b7-1618-4f07-8854-81566887a7cd could not be found. [ 2246.109315] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-84948cae-18f3-4f57-bc24-d27515fac153 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2246.109493] env[62740]: INFO nova.compute.manager [None req-84948cae-18f3-4f57-bc24-d27515fac153 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2246.109734] env[62740]: DEBUG oslo.service.loopingcall [None req-84948cae-18f3-4f57-bc24-d27515fac153 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2246.109961] env[62740]: DEBUG nova.compute.manager [-] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2246.110087] env[62740]: DEBUG nova.network.neutron [-] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2246.127568] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 64499a4ef73b49bca420806f3207e06f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2246.134264] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 64499a4ef73b49bca420806f3207e06f [ 2246.134655] env[62740]: DEBUG nova.network.neutron [-] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2246.135052] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 8314f3acacb5449a9a42a929a15ce4c1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2246.142933] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8314f3acacb5449a9a42a929a15ce4c1 [ 2246.143265] env[62740]: INFO nova.compute.manager [-] [instance: d2fb90b7-1618-4f07-8854-81566887a7cd] Took 0.03 seconds to deallocate network for instance. [ 2246.147041] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-84948cae-18f3-4f57-bc24-d27515fac153 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 8942c82a15994ba8961f4902a15520f3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2246.174943] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8942c82a15994ba8961f4902a15520f3 [ 2246.188808] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-84948cae-18f3-4f57-bc24-d27515fac153 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 910019c928be4e7aa05962262988c1ca in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2246.227092] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 910019c928be4e7aa05962262988c1ca [ 2246.230417] env[62740]: DEBUG oslo_concurrency.lockutils [None req-84948cae-18f3-4f57-bc24-d27515fac153 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Lock "d2fb90b7-1618-4f07-8854-81566887a7cd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.164s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2246.230749] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-84948cae-18f3-4f57-bc24-d27515fac153 tempest-ImagesTestJSON-592126557 tempest-ImagesTestJSON-592126557-project-member] Expecting reply to msg 2ee098fbd9bc4db48c98c29363c6086a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2246.241430] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ee098fbd9bc4db48c98c29363c6086a [ 2258.694610] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8be0f0e5-8c87-496f-8823-492d2347f615 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Expecting reply to msg 
5ecfea0ca5924064bcb0dae3a70c9969 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2258.704473] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ecfea0ca5924064bcb0dae3a70c9969 [ 2258.705077] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8be0f0e5-8c87-496f-8823-492d2347f615 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Acquiring lock "22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2287.890629] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2287.891123] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2287.891123] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62740) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 2288.887499] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2288.890858] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2288.890858] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Starting heal instance info cache {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 2288.890858] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Rebuilding the list of instances to heal {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 2288.891237] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg c057e166ebdc474aa4cf54b09040d148 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2288.908259] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c057e166ebdc474aa4cf54b09040d148 [ 2288.910495] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2288.910649] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Skipping network cache update for instance because it is Building. 
{{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2288.910781] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2288.910909] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2288.911048] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2288.911178] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2288.911300] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 0f438d9b-394a-465c-97ae-8393bdc3e1cd] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2288.911418] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 2ad0d938-304b-4535-8362-099c3a6864f6] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2288.911535] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: a925b4b2-7320-4c28-b083-c15adf060a00] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2288.911656] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Didn't find any instances for network info cache update. 
{{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 2288.912176] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2289.890613] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager.update_available_resource {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2289.891024] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 09db251ab8ac48bc83cc4dfb957c4d77 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2289.900984] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 09db251ab8ac48bc83cc4dfb957c4d77 [ 2289.901986] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2289.902213] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2289.902379] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2289.902534] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62740) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2289.903622] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be68779c-039e-43b8-b6de-ea00eec53e73 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2289.912364] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b987e3d3-ad66-41db-b8f4-c8f556c98f62 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2289.925812] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7d9d566-9294-40cd-b5db-e4728756602e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2289.931929] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c83f2103-c6b2-4f6b-95e5-06b91786ce0b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2289.961039] env[62740]: DEBUG 
nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181665MB free_disk=90GB free_vcpus=48 pci_devices=None {{(pid=62740) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2289.961207] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2289.961381] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2289.962188] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 2f76cb00fa484dd7a905e60415401560 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2289.993420] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2f76cb00fa484dd7a905e60415401560 [ 2289.997168] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg f6918f0f2bbb40908c1c80f413303c33 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2290.006632] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f6918f0f2bbb40908c1c80f413303c33 [ 2290.054485] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 19f27c16-45b3-47d8-acf0-18255844431f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2290.054485] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 4ee71d81-7d8b-42f8-a27c-b4645169fa3e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2290.054485] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2290.054485] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 1b975b29-fbaa-4385-9bf9-33496b4ed129 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2290.054786] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance b75490e6-ded3-4aa7-89ff-f4963fe82cfe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2290.054786] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2290.054786] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 0f438d9b-394a-465c-97ae-8393bdc3e1cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2290.054939] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 2ad0d938-304b-4535-8362-099c3a6864f6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2290.055010] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance a925b4b2-7320-4c28-b083-c15adf060a00 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2290.055207] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2290.055351] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2290.161057] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e502ad0-c29a-498a-8669-12dd757b333d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.168790] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe17bbbf-c854-4b12-b030-855f431e75e7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.198097] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92cecc8f-1f26-45f8-a012-adeb2a027374 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.205163] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eeda798-cd33-41bc-a02b-7128eab5c7da {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.217462] env[62740]: DEBUG nova.compute.provider_tree [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2290.217911] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg d8b3b69ee1404703a98bc765ed5d1751 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2290.224673] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d8b3b69ee1404703a98bc765ed5d1751 [ 2290.225550] env[62740]: DEBUG nova.scheduler.client.report [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2290.227739] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 71dd3a8607fb4bf98874ac55d5800869 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2290.242551] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 71dd3a8607fb4bf98874ac55d5800869 [ 
[ 2290.243211] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62740) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 2290.243394] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.282s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2291.243635] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2291.654015] env[62740]: WARNING oslo_vmware.rw_handles [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 2291.654015] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 2291.654015] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 2291.654015] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 2291.654015] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 2291.654015] env[62740]: ERROR oslo_vmware.rw_handles response.begin()
[ 2291.654015] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 2291.654015] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 2291.654015] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 2291.654015] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 2291.654015] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 2291.654015] env[62740]: ERROR oslo_vmware.rw_handles
[ 2291.654563] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/809e4771-5521-4641-ab55-1f065f7783ac/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 2291.656915] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 2291.657201] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Copying Virtual Disk [datastore2] vmware_temp/809e4771-5521-4641-ab55-1f065f7783ac/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore2] vmware_temp/809e4771-5521-4641-ab55-1f065f7783ac/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 2291.657515] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5393c3ec-285e-4b7c-98f5-f925df1f4e5f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2291.666086] env[62740]: DEBUG oslo_vmware.api [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Waiting for the task: (returnval){
[ 2291.666086] env[62740]: value = "task-640371"
[ 2291.666086] env[62740]: _type = "Task"
[ 2291.666086] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2291.674753] env[62740]: DEBUG oslo_vmware.api [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Task: {'id': task-640371, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2291.890613] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2292.175551] env[62740]: DEBUG oslo_vmware.exceptions [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Fault InvalidArgument not matched. {{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 2292.175853] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2292.176436] env[62740]: ERROR nova.compute.manager [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2292.176436] env[62740]: Faults: ['InvalidArgument']
[ 2292.176436] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Traceback (most recent call last):
[ 2292.176436] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 2292.176436] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] yield resources
[ 2292.176436] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 2292.176436] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] self.driver.spawn(context, instance, image_meta,
[ 2292.176436] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2292.176436] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2292.176436] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2292.176436] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] self._fetch_image_if_missing(context, vi)
[ 2292.176436] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2292.176436] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] image_cache(vi, tmp_image_ds_loc)
[ 2292.176436] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2292.176436] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] vm_util.copy_virtual_disk(
[ 2292.176436] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2292.176436] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] session._wait_for_task(vmdk_copy_task)
[ 2292.176436] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2292.176436] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] return self.wait_for_task(task_ref)
[ 2292.176436] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2292.176436] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] return evt.wait()
[ 2292.176436] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2292.176436] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] result = hub.switch()
[ 2292.176436] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2292.176436] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] return self.greenlet.switch()
[ 2292.176436] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2292.176436] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] self.f(*self.args, **self.kw)
[ 2292.176436] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2292.176436] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] raise exceptions.translate_fault(task_info.error)
[ 2292.176436] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2292.176436] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Faults: ['InvalidArgument']
[ 2292.176436] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f]
[ 2292.177358] env[62740]: INFO nova.compute.manager [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Terminating instance
[ 2292.178555] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2292.178555] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2292.179167] env[62740]: DEBUG nova.compute.manager [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 2292.179360] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 2292.179579] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c21249b8-b278-4865-9320-f8b31c94e845 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2292.181732] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fa37b59-cf3e-4e78-8678-85ac4fff930b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2292.188018] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 2292.188227] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-418d9f09-d047-4ace-800c-37e3c67db0aa {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2292.190280] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2292.190455] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 2292.191415] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5b678fe-ed28-46ac-b973-19937b180e68 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2292.195834] env[62740]: DEBUG oslo_vmware.api [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Waiting for the task: (returnval){
[ 2292.195834] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52131e78-748d-3770-f8b0-b1a48ebb0973"
[ 2292.195834] env[62740]: _type = "Task"
[ 2292.195834] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2292.202964] env[62740]: DEBUG oslo_vmware.api [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52131e78-748d-3770-f8b0-b1a48ebb0973, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2292.269436] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 2292.269687] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 2292.269873] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Deleting the datastore file [datastore2] 19f27c16-45b3-47d8-acf0-18255844431f {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 2292.270169] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-357fd58b-4644-4245-98f8-cb9ce66b430f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2292.276460] env[62740]: DEBUG oslo_vmware.api [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Waiting for the task: (returnval){
[ 2292.276460] env[62740]: value = "task-640373"
[ 2292.276460] env[62740]: _type = "Task"
[ 2292.276460] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2292.284265] env[62740]: DEBUG oslo_vmware.api [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Task: {'id': task-640373, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
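Both task exchanges above (the CopyVirtualDisk_Task wait and the DeleteDatastoreFile_Task poll at 0%) follow oslo.vmware's invoke-then-poll pattern: a *_Task method returns a task reference, and wait_for_task polls its state until it finishes, translating a vim fault into an exception on error. A minimal sketch of that loop, with an assumed get_task_info helper; this is not the actual oslo.vmware implementation:

```python
import time

class VimFaultException(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException."""

def wait_for_task(session, task_ref, interval=0.5):
    # Poll the TaskInfo until vCenter reports a terminal state, as in the
    # "Task: {'id': task-640371, ...} progress is 0%." lines above.
    while True:
        info = session.get_task_info(task_ref)  # assumed helper on the session
        if info.state == 'success':
            return info.result
        if info.state == 'error':
            # On error the vim fault is translated into an exception; the
            # "Fault InvalidArgument not matched." DEBUG line shows the
            # fallback to this generic wrapper.
            raise VimFaultException(info.error.localizedMessage)
        time.sleep(interval)
```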
[ 2292.706458] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 2292.706679] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Creating directory with path [datastore2] vmware_temp/058238dd-496f-49c2-8420-8017836f4b27/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2292.706915] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4ac94a50-4e74-4e48-890c-5be4920e0a03 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2292.717911] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Created directory with path [datastore2] vmware_temp/058238dd-496f-49c2-8420-8017836f4b27/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2292.718112] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Fetch image to [datastore2] vmware_temp/058238dd-496f-49c2-8420-8017836f4b27/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 2292.718295] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/058238dd-496f-49c2-8420-8017836f4b27/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 2292.719071] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e70c922e-1ba0-405b-b6ae-25d7d755112e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2292.725605] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00726bce-8161-4832-81dc-32a04765c366 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2292.734443] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c90cd13-f136-4e40-8764-3926bcb69b3a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2292.765089] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61135d79-1453-4138-8c29-d245456a7bed {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2292.770644] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b54d2abf-da0b-4935-a7ad-b5b66edd51a0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2292.784533] env[62740]: DEBUG oslo_vmware.api [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Task: {'id': task-640373, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081797} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2292.784767] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 2292.784987] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 2292.785185] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 2292.785361] env[62740]: INFO nova.compute.manager [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Took 0.61 seconds to destroy the instance on the hypervisor.
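The "Preparing fetch location" / "Fetch image to ... tmp-sparse.vmdk" records above show the cache-miss path of the VMware driver's image cache: the image is downloaded to a temporary sparse vmdk under vmware_temp/ and then copied into the shared devstack-image-cache_base folder. A runnable toy sketch of that flow; the helper functions are stand-ins, not Nova's real datastore or Glance APIs:

```python
from uuid import uuid4

# Toy stand-ins so the sketch runs; in Nova these are datastore and
# Glance operations (assumptions, not the real API).
_datastore_files = set()

def file_exists(path):
    return path in _datastore_files

def download_from_glance(image_id, path):
    _datastore_files.add(path)  # streamed HTTP write (rw_handles) in Nova

def copy_virtual_disk(src, dst):
    _datastore_files.add(dst)   # VirtualDiskManager.CopyVirtualDisk_Task in Nova

def fetch_image_if_missing(image_id, datastore='datastore2'):
    """Sketch of the cache-miss flow in the log: fetch the image to a
    temporary sparse vmdk, then copy it into the shared image cache."""
    cache_vmdk = (f'[{datastore}] devstack-image-cache_base/'
                  f'{image_id}/{image_id}.vmdk')
    if file_exists(cache_vmdk):
        return cache_vmdk
    tmp = f'[{datastore}] vmware_temp/{uuid4()}/{image_id}/tmp-sparse.vmdk'
    download_from_glance(image_id, tmp)
    copy_virtual_disk(tmp, cache_vmdk)
    return cache_vmdk

print(fetch_image_if_missing('174f7655-3fb8-458a-8e9c-108936afe738'))
```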
[ 2292.787477] env[62740]: DEBUG nova.compute.claims [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 2292.787649] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2292.787898] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2292.789839] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Expecting reply to msg 597f439828c54196be377b00fad6bc7f in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 2292.792009] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 2292.821813] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 597f439828c54196be377b00fad6bc7f
[ 2292.841459] env[62740]: DEBUG oslo_vmware.rw_handles [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/058238dd-496f-49c2-8420-8017836f4b27/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 2292.903220] env[62740]: DEBUG oslo_vmware.rw_handles [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Completed reading data from the image iterator. {{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 2292.903447] env[62740]: DEBUG oslo_vmware.rw_handles [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/058238dd-496f-49c2-8420-8017836f4b27/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 2292.988643] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfcf8b9b-21b5-4560-aaeb-badde930fd92 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2292.998681] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e50bbd0d-527b-4949-85ec-479e973905a0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2293.027388] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5111501-8b9a-445e-bda7-f9808f5d90ac {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2293.034039] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-144e48fc-77bf-4d8b-b892-daac2bc98242 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2293.046715] env[62740]: DEBUG nova.compute.provider_tree [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2293.047247] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Expecting reply to msg cffabce4cf1e44f79b32b65fb1914117 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 2293.054751] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cffabce4cf1e44f79b32b65fb1914117
[ 2293.055601] env[62740]: DEBUG nova.scheduler.client.report [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 2293.057833] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Expecting reply to msg 97dffd2b27fe4e83aaf73674b34c14d8 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 2293.069616] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 97dffd2b27fe4e83aaf73674b34c14d8
[ 2293.070301] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.282s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
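The RemoteDisconnected WARNINGs in this log (at 2291.654015 above and again at 2341.845070 below) surface inside the write handle's close(), because the upload's status line is only read via getresponse() after the whole body has been streamed; if the ESX host closes the socket without answering, http.client raises at that point even though the file data was already written. A toy illustration of that shape, assuming a plain http.client connection; this is not oslo_vmware.rw_handles.FileWriteHandle:

```python
import http.client

class DatastoreWriteHandle:
    """Toy datastore write handle (illustrative only)."""

    def __init__(self, host, path, size):
        self._conn = http.client.HTTPSConnection(host)
        # The upload is a streamed PUT with a preset Content-Length.
        self._conn.putrequest('PUT', path)
        self._conn.putheader('Content-Length', str(size))
        self._conn.endheaders()

    def write(self, chunk):
        self._conn.send(chunk)

    def close(self):
        # The server's response is only read here, after the body has been
        # sent -- which is why the log shows the RemoteDisconnected
        # traceback under close(), not under write().
        try:
            self._conn.getresponse()
        except http.client.RemoteDisconnected:
            # The remote end closed the socket without a response; the
            # upload itself may still have completed, as it did above.
            pass
        finally:
            self._conn.close()
```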
[ 2293.070806] env[62740]: ERROR nova.compute.manager [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2293.070806] env[62740]: Faults: ['InvalidArgument']
[ 2293.070806] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Traceback (most recent call last):
[ 2293.070806] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 2293.070806] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] self.driver.spawn(context, instance, image_meta,
[ 2293.070806] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2293.070806] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2293.070806] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2293.070806] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] self._fetch_image_if_missing(context, vi)
[ 2293.070806] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2293.070806] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] image_cache(vi, tmp_image_ds_loc)
[ 2293.070806] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2293.070806] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] vm_util.copy_virtual_disk(
[ 2293.070806] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2293.070806] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] session._wait_for_task(vmdk_copy_task)
[ 2293.070806] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2293.070806] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] return self.wait_for_task(task_ref)
[ 2293.070806] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2293.070806] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] return evt.wait()
[ 2293.070806] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2293.070806] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] result = hub.switch()
[ 2293.070806] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2293.070806] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] return self.greenlet.switch()
[ 2293.070806] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2293.070806] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] self.f(*self.args, **self.kw)
[ 2293.070806] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2293.070806] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] raise exceptions.translate_fault(task_info.error)
[ 2293.070806] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2293.070806] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Faults: ['InvalidArgument']
[ 2293.070806] env[62740]: ERROR nova.compute.manager [instance: 19f27c16-45b3-47d8-acf0-18255844431f]
[ 2293.071890] env[62740]: DEBUG nova.compute.utils [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 2293.072812] env[62740]: DEBUG nova.compute.manager [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Build of instance 19f27c16-45b3-47d8-acf0-18255844431f was re-scheduled: A specified parameter was not correct: fileType
[ 2293.072812] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 2293.073215] env[62740]: DEBUG nova.compute.manager [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 2293.073388] env[62740]: DEBUG nova.compute.manager [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
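The "Fault InvalidArgument not matched." DEBUG line earlier records oslo.vmware failing to find a specific exception class registered for the fault name and falling back to the generic VimFaultException that then surfaces in the traceback above. A minimal sketch of that registry-with-fallback lookup; the mapping contents here are illustrative, not the library's real table:

```python
# Illustrative fault-name -> exception-class lookup with a generic fallback.
class VimFaultException(Exception):
    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list

class FileFaultException(Exception):
    pass

_FAULT_CLASS_MAP = {
    'FileFault': FileFaultException,
    # ...specific fault names known to the library...
}

def translate_fault(fault_name, message):
    cls = _FAULT_CLASS_MAP.get(fault_name)
    if cls is None:
        # No specific class matched -- fall back to the generic wrapper,
        # keeping the fault name, as in "Faults: ['InvalidArgument']".
        return VimFaultException([fault_name], message)
    return cls(message)

exc = translate_fault('InvalidArgument',
                      'A specified parameter was not correct: fileType')
assert isinstance(exc, VimFaultException)
```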
[ 2293.073560] env[62740]: DEBUG nova.compute.manager [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 2293.073726] env[62740]: DEBUG nova.network.neutron [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 2293.530070] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Expecting reply to msg 5df8a2508c894cf79e8e7585b71e8e63 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 2293.541017] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5df8a2508c894cf79e8e7585b71e8e63
[ 2293.541138] env[62740]: DEBUG nova.network.neutron [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2293.541888] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Expecting reply to msg f05687781e2e45e6b220fef811b2cfcb in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 2293.552223] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f05687781e2e45e6b220fef811b2cfcb
[ 2293.552223] env[62740]: INFO nova.compute.manager [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Took 0.48 seconds to deallocate network for instance.
[ 2293.553527] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Expecting reply to msg 7ede8a342d564f6a9c4c39049311eef1 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 2293.591611] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ede8a342d564f6a9c4c39049311eef1
[ 2293.594407] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Expecting reply to msg f8497756a403450e90e3348f6fefc02a in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 2293.651278] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f8497756a403450e90e3348f6fefc02a
[ 2293.675529] env[62740]: INFO nova.scheduler.client.report [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Deleted allocations for instance 19f27c16-45b3-47d8-acf0-18255844431f
[ 2293.681628] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Expecting reply to msg 9ab1717927ef47aaa20f0b22cc6f820d in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 2293.695325] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ab1717927ef47aaa20f0b22cc6f820d
[ 2293.695851] env[62740]: DEBUG oslo_concurrency.lockutils [None req-6f6e2a69-9d27-46d7-8a1f-0fd52c3e9be0 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Lock "19f27c16-45b3-47d8-acf0-18255844431f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 624.821s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2293.696125] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2f4ba341-ad6c-439b-8b8d-aea5ded68742 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Lock "19f27c16-45b3-47d8-acf0-18255844431f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 428.992s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2293.696366] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2f4ba341-ad6c-439b-8b8d-aea5ded68742 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Acquiring lock "19f27c16-45b3-47d8-acf0-18255844431f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2293.696573] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2f4ba341-ad6c-439b-8b8d-aea5ded68742 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Lock "19f27c16-45b3-47d8-acf0-18255844431f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2293.696738] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2f4ba341-ad6c-439b-8b8d-aea5ded68742 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Lock "19f27c16-45b3-47d8-acf0-18255844431f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2293.698630] env[62740]: INFO nova.compute.manager [None req-2f4ba341-ad6c-439b-8b8d-aea5ded68742 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Terminating instance
[ 2293.700370] env[62740]: DEBUG nova.compute.manager [None req-2f4ba341-ad6c-439b-8b8d-aea5ded68742 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 2293.700562] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-2f4ba341-ad6c-439b-8b8d-aea5ded68742 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 2293.701031] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5a157c0f-f19c-488c-8ba5-3487cf9c031f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2293.710479] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-696ea532-8945-4dce-82ba-19926a0eef74 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2293.736682] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-2f4ba341-ad6c-439b-8b8d-aea5ded68742 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 19f27c16-45b3-47d8-acf0-18255844431f could not be found.
[ 2293.736877] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-2f4ba341-ad6c-439b-8b8d-aea5ded68742 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 2293.737073] env[62740]: INFO nova.compute.manager [None req-2f4ba341-ad6c-439b-8b8d-aea5ded68742 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 2293.737329] env[62740]: DEBUG oslo.service.loopingcall [None req-2f4ba341-ad6c-439b-8b8d-aea5ded68742 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2293.737781] env[62740]: DEBUG nova.compute.manager [-] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 2293.737882] env[62740]: DEBUG nova.network.neutron [-] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 2293.753509] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 231057ca7df34f3aaafb5995f70cc32a in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 2293.759236] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 231057ca7df34f3aaafb5995f70cc32a
[ 2293.759571] env[62740]: DEBUG nova.network.neutron [-] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2293.759981] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 1e858abf68d247fea00283fc7cc32194 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 2293.767506] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1e858abf68d247fea00283fc7cc32194
[ 2293.767944] env[62740]: INFO nova.compute.manager [-] [instance: 19f27c16-45b3-47d8-acf0-18255844431f] Took 0.03 seconds to deallocate network for instance.
[ 2293.771308] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2f4ba341-ad6c-439b-8b8d-aea5ded68742 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Expecting reply to msg 5b55224f0f7f4c5fab4e0a7be12d4211 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 2293.797230] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5b55224f0f7f4c5fab4e0a7be12d4211
[ 2293.811559] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2f4ba341-ad6c-439b-8b8d-aea5ded68742 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Expecting reply to msg e8d079e7ff7d436bb80cc7133997afb8 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 2293.847072] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e8d079e7ff7d436bb80cc7133997afb8
[ 2293.849671] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2f4ba341-ad6c-439b-8b8d-aea5ded68742 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Lock "19f27c16-45b3-47d8-acf0-18255844431f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.154s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2293.850038] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2f4ba341-ad6c-439b-8b8d-aea5ded68742 tempest-ServerTagsTestJSON-576240110 tempest-ServerTagsTestJSON-576240110-project-member] Expecting reply to msg a4e0440ad4194670aea0857d20366b76 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 2293.860898] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a4e0440ad4194670aea0857d20366b76
[ 2295.891233] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
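The lockutils lines above record, for each named lock, how long the caller waited to acquire it and how long it was held (e.g. do_terminate_instance waited 428.992s while _locked_do_build_and_run_instance held the instance lock for 624.821s). A minimal sketch of that waited/held accounting; illustrative, not oslo.concurrency's implementation:

```python
import threading
import time
from contextlib import contextmanager

_locks = {}  # name -> threading.Lock (per-process registry, as a sketch)

@contextmanager
def timed_lock(name, owner):
    # Measure time spent waiting for the lock, then time spent holding it,
    # mirroring the "waited N.NNNs" / "held N.NNNs" DEBUG lines above.
    lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    print(f'Lock "{name}" acquired by "{owner}" :: '
          f'waited {time.monotonic() - t0:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" "released" by "{owner}" :: '
              f'held {time.monotonic() - t1:.3f}s')

with timed_lock("compute_resources", "example.owner"):
    pass  # critical section
```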
[ 2299.153279] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-7d37e619-4fa9-4c18-8172-cc4e2d9f1458 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 1ddf2594d0924de2aa1e2a28d9aad0df in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 2299.161977] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1ddf2594d0924de2aa1e2a28d9aad0df
[ 2299.162437] env[62740]: DEBUG oslo_concurrency.lockutils [None req-7d37e619-4fa9-4c18-8172-cc4e2d9f1458 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquiring lock "0f438d9b-394a-465c-97ae-8393bdc3e1cd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2330.205792] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 9782ec6d85d64a129dbda1e4bfee901e in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 2330.215548] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9782ec6d85d64a129dbda1e4bfee901e
[ 2341.845070] env[62740]: WARNING oslo_vmware.rw_handles [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 2341.845070] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 2341.845070] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 2341.845070] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 2341.845070] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 2341.845070] env[62740]: ERROR oslo_vmware.rw_handles response.begin()
[ 2341.845070] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 2341.845070] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 2341.845070] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 2341.845070] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 2341.845070] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 2341.845070] env[62740]: ERROR oslo_vmware.rw_handles
[ 2341.845752] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/058238dd-496f-49c2-8420-8017836f4b27/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 2341.847830] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 2341.848100] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Copying Virtual Disk [datastore2] vmware_temp/058238dd-496f-49c2-8420-8017836f4b27/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore2] vmware_temp/058238dd-496f-49c2-8420-8017836f4b27/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 2341.848392] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c5be0dfa-2ee0-4765-96f8-68e51e2eac68 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2341.856288] env[62740]: DEBUG oslo_vmware.api [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Waiting for the task: (returnval){
[ 2341.856288] env[62740]: value = "task-640374"
[ 2341.856288] env[62740]: _type = "Task"
[ 2341.856288] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2341.864259] env[62740]: DEBUG oslo_vmware.api [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Task: {'id': task-640374, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2342.367396] env[62740]: DEBUG oslo_vmware.exceptions [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Fault InvalidArgument not matched. {{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 2342.367676] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2342.368259] env[62740]: ERROR nova.compute.manager [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2342.368259] env[62740]: Faults: ['InvalidArgument']
[ 2342.368259] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Traceback (most recent call last):
[ 2342.368259] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 2342.368259] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] yield resources
[ 2342.368259] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 2342.368259] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] self.driver.spawn(context, instance, image_meta,
[ 2342.368259] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2342.368259] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2342.368259] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2342.368259] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] self._fetch_image_if_missing(context, vi)
[ 2342.368259] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2342.368259] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] image_cache(vi, tmp_image_ds_loc)
[ 2342.368259] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2342.368259] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] vm_util.copy_virtual_disk(
[ 2342.368259] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2342.368259] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] session._wait_for_task(vmdk_copy_task)
[ 2342.368259] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2342.368259] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] return self.wait_for_task(task_ref)
[ 2342.368259] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2342.368259] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] return evt.wait()
[ 2342.368259] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2342.368259] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] result = hub.switch()
[ 2342.368259] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2342.368259] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] return self.greenlet.switch()
[ 2342.368259] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2342.368259] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] self.f(*self.args, **self.kw)
[ 2342.368259] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2342.368259] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] raise exceptions.translate_fault(task_info.error)
[ 2342.368259] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2342.368259] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Faults: ['InvalidArgument']
[ 2342.368259] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e]
[ 2342.369214] env[62740]: INFO nova.compute.manager [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Terminating instance
[ 2342.370326] env[62740]: DEBUG oslo_concurrency.lockutils [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2342.370442] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2342.370585] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4be70fe7-42d8-495a-bca8-649e7cd8cde4 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2342.372831] env[62740]: DEBUG nova.compute.manager [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 2342.373034] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 2342.374007] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6651d8e9-39f7-44c4-a8b8-52c6c3f0a995 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2342.380904] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 2342.381143] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9eb4edd0-7c40-41c7-901e-043ed2edc6c0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2342.383094] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2342.383273] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 2342.384255] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-abdc9c42-105d-4e35-a3bb-bee2a27ec90c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2342.388789] env[62740]: DEBUG oslo_vmware.api [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Waiting for the task: (returnval){
[ 2342.388789] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52d20e29-59ba-85ea-0c67-a84bd74828df"
[ 2342.388789] env[62740]: _type = "Task"
[ 2342.388789] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2342.395761] env[62740]: DEBUG oslo_vmware.api [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52d20e29-59ba-85ea-0c67-a84bd74828df, 'name': SearchDatastore_Task} progress is 0%.
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2342.447332] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2342.447572] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2342.447801] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Deleting the datastore file [datastore2] 4ee71d81-7d8b-42f8-a27c-b4645169fa3e {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2342.448114] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-50aef2a4-6439-4936-a2c8-71c4383114d5 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2342.453840] env[62740]: DEBUG oslo_vmware.api [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Waiting for the task: (returnval){ [ 2342.453840] env[62740]: value = "task-640376" [ 2342.453840] env[62740]: _type = "Task" [ 2342.453840] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2342.461468] env[62740]: DEBUG oslo_vmware.api [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Task: {'id': task-640376, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2342.899664] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2342.900094] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Creating directory with path [datastore2] vmware_temp/d7c162db-beee-424b-a43e-d50ac366c6c7/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2342.900185] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fbad81d0-cb87-49e4-b530-9f7fdc072201 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2342.911036] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Created directory with path [datastore2] vmware_temp/d7c162db-beee-424b-a43e-d50ac366c6c7/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2342.911230] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Fetch image to [datastore2] vmware_temp/d7c162db-beee-424b-a43e-d50ac366c6c7/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2342.911403] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/d7c162db-beee-424b-a43e-d50ac366c6c7/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2342.912110] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09b67724-3fbb-4802-9c67-786599bda2fe {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2342.918519] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8d8dccd-b8ac-4fa9-9ff5-8c66e53817ff {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2342.927092] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d99f72b-5f25-443e-9738-74589047a08b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2342.960213] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80f52860-0d99-4ece-950c-0cba0fcd6c38 {{(pid=62740) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2342.966681] env[62740]: DEBUG oslo_vmware.api [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Task: {'id': task-640376, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081992} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2342.968042] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2342.968234] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2342.968409] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2342.968583] env[62740]: INFO nova.compute.manager [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Took 0.60 seconds to destroy the instance on the hypervisor. 
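[editor's note] The CopyVirtualDisk_Task failure recorded above surfaces through the vSphere task-polling path that the traceback walks: wait_for_task blocks on an event, a looping call repeatedly invokes _poll_task, and when the task reaches an error state the SOAP fault ('InvalidArgument' on fileType) is translated into a VimFaultException. Below is a minimal standalone sketch of that poll-and-translate loop under assumed names (poll_task_until_done, TaskFault, get_task_info, POLL_INTERVAL); it is illustrative only, not the oslo.vmware implementation.

import time

POLL_INTERVAL = 0.5  # seconds between TaskInfo reads (assumed value)

class TaskFault(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException."""
    def __init__(self, message, fault_list):
        super().__init__(message)
        self.fault_list = fault_list  # e.g. ['InvalidArgument']

def poll_task_until_done(get_task_info, task_ref):
    """Poll TaskInfo for task_ref; return on success, raise on error.

    get_task_info(task_ref) is an assumed callable returning an object
    with .state ('running' | 'success' | 'error'), .progress and .error,
    mirroring the vSphere TaskInfo the log entries report on.
    """
    while True:
        info = get_task_info(task_ref)
        if info.state == 'success':
            return info
        if info.state == 'error':
            # Mirrors "raise exceptions.translate_fault(task_info.error)"
            # in the traceback: the fault is mapped to a Python exception
            # carrying the fault names seen in the log. Attribute names
            # (localized_message, fault_names) are assumptions.
            raise TaskFault(info.error.localized_message,
                            info.error.fault_names)
        # Task still running, e.g. "CopyVirtualDisk_Task progress is 0%".
        time.sleep(POLL_INTERVAL)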
[ 2342.970599] env[62740]: DEBUG nova.compute.claims [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2342.970765] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2342.970975] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2342.972844] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 450966ba653d4d509fbfd56c333aac7d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2342.973635] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-583261db-4179-483b-903b-7cb71f191e50 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2342.994068] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2343.012953] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 450966ba653d4d509fbfd56c333aac7d [ 2343.128856] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af0b0cbc-1e21-46b8-9903-76f48dac39fe {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2343.135695] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e480007-98d6-4e26-aba7-7dea77fa08e6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2343.164865] env[62740]: DEBUG oslo_concurrency.lockutils [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2343.165617] env[62740]: ERROR nova.compute.manager [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: 
b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image 174f7655-3fb8-458a-8e9c-108936afe738. [ 2343.165617] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Traceback (most recent call last): [ 2343.165617] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2343.165617] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2343.165617] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2343.165617] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] result = getattr(controller, method)(*args, **kwargs) [ 2343.165617] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2343.165617] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] return self._get(image_id) [ 2343.165617] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2343.165617] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2343.165617] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2343.165617] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] resp, body = self.http_client.get(url, headers=header) [ 2343.165617] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 2343.165617] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] return self.request(url, 'GET', **kwargs) [ 2343.165617] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2343.165617] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] return self._handle_response(resp) [ 2343.165617] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2343.165617] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] raise exc.from_response(resp, resp.content) [ 2343.165617] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 2343.165617] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] [ 2343.165617] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] During handling of the above exception, another exception occurred: [ 2343.165617] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] [ 2343.165617] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Traceback (most recent call last): [ 2343.165617] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2343.165617] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] yield resources [ 2343.165617] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2343.165617] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] self.driver.spawn(context, instance, image_meta, [ 2343.165617] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2343.165617] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2343.165617] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2343.165617] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] self._fetch_image_if_missing(context, vi) [ 2343.165617] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 2343.165617] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] image_fetch(context, vi, tmp_image_ds_loc) [ 2343.165617] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 2343.165617] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] images.fetch_image( [ 2343.165617] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 2343.165617] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] metadata = IMAGE_API.get(context, image_ref) [ 2343.166745] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 2343.166745] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] return session.show(context, image_id, [ 2343.166745] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 2343.166745] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] _reraise_translated_image_exception(image_id) [ 2343.166745] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 2343.166745] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] raise new_exc.with_traceback(exc_trace) [ 2343.166745] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2343.166745] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2343.166745] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2343.166745] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] result = getattr(controller, method)(*args, **kwargs) [ 2343.166745] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2343.166745] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] return self._get(image_id) [ 2343.166745] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2343.166745] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2343.166745] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2343.166745] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] resp, body = self.http_client.get(url, headers=header) [ 2343.166745] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 2343.166745] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] return self.request(url, 'GET', **kwargs) [ 2343.166745] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2343.166745] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] return self._handle_response(resp) [ 2343.166745] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2343.166745] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] raise exc.from_response(resp, resp.content) [ 2343.166745] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] nova.exception.ImageNotAuthorized: Not authorized for image 174f7655-3fb8-458a-8e9c-108936afe738. 
[ 2343.166745] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] [ 2343.166745] env[62740]: INFO nova.compute.manager [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Terminating instance [ 2343.167813] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64fbb86c-8d2e-43db-ae1b-0318d8674fe9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2343.170275] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2343.170492] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2343.171092] env[62740]: DEBUG nova.compute.manager [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2343.171282] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2343.171496] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-56c5aff6-7e06-4417-941c-9534bed52acb {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2343.173705] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5845719-9157-4191-ab52-bce098adbb5f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2343.184381] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5bd4b50-f792-4c12-ae20-ce1fe63fa2c4 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2343.189247] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2343.189426] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Folder [datastore2] 
devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2343.190403] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-733e68fa-252e-4d61-9bff-a62ab927b8ea {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2343.202100] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2343.202513] env[62740]: DEBUG nova.compute.provider_tree [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2343.202976] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg ad9fabc29a3a4fe79634be6357f7228f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2343.203978] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6b8abaff-77a7-4f43-878a-8562a4da3015 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2343.206585] env[62740]: DEBUG oslo_vmware.api [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Waiting for the task: (returnval){ [ 2343.206585] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52442215-7740-e841-018c-d3b23193c304" [ 2343.206585] env[62740]: _type = "Task" [ 2343.206585] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2343.210011] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ad9fabc29a3a4fe79634be6357f7228f [ 2343.210906] env[62740]: DEBUG nova.scheduler.client.report [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2343.213044] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 4bf1a9247252411d83c326aa7eaf7fe5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2343.216703] env[62740]: DEBUG oslo_vmware.api [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52442215-7740-e841-018c-d3b23193c304, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2343.226140] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4bf1a9247252411d83c326aa7eaf7fe5 [ 2343.226939] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.256s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2343.227501] env[62740]: ERROR nova.compute.manager [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2343.227501] env[62740]: Faults: ['InvalidArgument'] [ 2343.227501] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Traceback (most recent call last): [ 2343.227501] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2343.227501] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] self.driver.spawn(context, instance, image_meta, [ 2343.227501] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2343.227501] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] self._vmops.spawn(context, instance, image_meta, 
injected_files, [ 2343.227501] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2343.227501] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] self._fetch_image_if_missing(context, vi) [ 2343.227501] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2343.227501] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] image_cache(vi, tmp_image_ds_loc) [ 2343.227501] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2343.227501] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] vm_util.copy_virtual_disk( [ 2343.227501] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2343.227501] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] session._wait_for_task(vmdk_copy_task) [ 2343.227501] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2343.227501] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] return self.wait_for_task(task_ref) [ 2343.227501] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2343.227501] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] return evt.wait() [ 2343.227501] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2343.227501] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] result = hub.switch() [ 2343.227501] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2343.227501] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] return self.greenlet.switch() [ 2343.227501] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2343.227501] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] self.f(*self.args, **self.kw) [ 2343.227501] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2343.227501] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] raise exceptions.translate_fault(task_info.error) [ 2343.227501] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2343.227501] env[62740]: 
ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Faults: ['InvalidArgument'] [ 2343.227501] env[62740]: ERROR nova.compute.manager [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] [ 2343.228606] env[62740]: DEBUG nova.compute.utils [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2343.229966] env[62740]: DEBUG nova.compute.manager [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Build of instance 4ee71d81-7d8b-42f8-a27c-b4645169fa3e was re-scheduled: A specified parameter was not correct: fileType [ 2343.229966] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2343.230329] env[62740]: DEBUG nova.compute.manager [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2343.230506] env[62740]: DEBUG nova.compute.manager [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2343.230678] env[62740]: DEBUG nova.compute.manager [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2343.230838] env[62740]: DEBUG nova.network.neutron [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2343.309904] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2343.310184] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2343.310323] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Deleting the datastore file [datastore2] 
b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6 {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2343.310589] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-85b4b8d2-dab4-4551-995b-6ea44ab46778 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2343.317200] env[62740]: DEBUG oslo_vmware.api [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Waiting for the task: (returnval){ [ 2343.317200] env[62740]: value = "task-640378" [ 2343.317200] env[62740]: _type = "Task" [ 2343.317200] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2343.324674] env[62740]: DEBUG oslo_vmware.api [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Task: {'id': task-640378, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2343.484223] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 59a03aecf5934caaae628cfaf92a2f6f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2343.496349] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 59a03aecf5934caaae628cfaf92a2f6f [ 2343.496935] env[62740]: DEBUG nova.network.neutron [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2343.497429] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 2df650c24204418a991ab0ff83ad2c5b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2343.506745] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2df650c24204418a991ab0ff83ad2c5b [ 2343.507404] env[62740]: INFO nova.compute.manager [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Took 0.28 seconds to deallocate network for instance. 
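[editor's note] The oslo_concurrency.lockutils lines that bracket the claim abort above follow a fixed three-step trace: Acquiring, acquired with the wait time (waited 0.000s), released with the hold time (held 0.256s here, and held 632.623s for the long build lock just below). A minimal sketch of a context manager producing the same trace follows; timed_lock, the process-local lock table, and the print-based output are assumptions, not oslo.concurrency's implementation.

import threading
import time
from contextlib import contextmanager

_locks = {}
_registry_guard = threading.Lock()

def _get_lock(name):
    # One process-local lock object per name, like an internal lock table.
    with _registry_guard:
        return _locks.setdefault(name, threading.Lock())

@contextmanager
def timed_lock(name, caller):
    lock = _get_lock(name)
    print(f'Acquiring lock "{name}" by "{caller}"')
    t0 = time.monotonic()
    with lock:
        waited = time.monotonic() - t0
        print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
        t1 = time.monotonic()
        try:
            yield
        finally:
            held = time.monotonic() - t1
            print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')

# Usage mirroring the resource-tracker entries above:
# with timed_lock("compute_resources",
#                 "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim"):
#     ...  # abort the instance claim while holding the lock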
[ 2343.509150] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 1748fff87df64e589b033a6007e14435 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2343.545126] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1748fff87df64e589b033a6007e14435 [ 2343.547729] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 67814c4d1faa4e55b1f6d7cf57e2a9e4 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2343.577918] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 67814c4d1faa4e55b1f6d7cf57e2a9e4 [ 2343.602546] env[62740]: INFO nova.scheduler.client.report [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Deleted allocations for instance 4ee71d81-7d8b-42f8-a27c-b4645169fa3e [ 2343.608782] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 3e61c9e55b59451c88cbfcb3c345f9f5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2343.625640] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e61c9e55b59451c88cbfcb3c345f9f5 [ 2343.626362] env[62740]: DEBUG oslo_concurrency.lockutils [None req-a3c571ed-6ec0-41c2-b7e6-1f51ad99077c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "4ee71d81-7d8b-42f8-a27c-b4645169fa3e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 632.623s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2343.626651] env[62740]: DEBUG oslo_concurrency.lockutils [None req-22dd17ee-e645-48d6-81bf-2ad2c631be62 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "4ee71d81-7d8b-42f8-a27c-b4645169fa3e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 436.589s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2343.626895] env[62740]: DEBUG oslo_concurrency.lockutils [None req-22dd17ee-e645-48d6-81bf-2ad2c631be62 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquiring lock "4ee71d81-7d8b-42f8-a27c-b4645169fa3e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2343.627151] env[62740]: DEBUG oslo_concurrency.lockutils [None req-22dd17ee-e645-48d6-81bf-2ad2c631be62 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "4ee71d81-7d8b-42f8-a27c-b4645169fa3e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2343.627363] env[62740]: DEBUG 
oslo_concurrency.lockutils [None req-22dd17ee-e645-48d6-81bf-2ad2c631be62 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "4ee71d81-7d8b-42f8-a27c-b4645169fa3e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2343.629657] env[62740]: INFO nova.compute.manager [None req-22dd17ee-e645-48d6-81bf-2ad2c631be62 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Terminating instance [ 2343.631900] env[62740]: DEBUG oslo_concurrency.lockutils [None req-22dd17ee-e645-48d6-81bf-2ad2c631be62 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquiring lock "refresh_cache-4ee71d81-7d8b-42f8-a27c-b4645169fa3e" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2343.632074] env[62740]: DEBUG oslo_concurrency.lockutils [None req-22dd17ee-e645-48d6-81bf-2ad2c631be62 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquired lock "refresh_cache-4ee71d81-7d8b-42f8-a27c-b4645169fa3e" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2343.632256] env[62740]: DEBUG nova.network.neutron [None req-22dd17ee-e645-48d6-81bf-2ad2c631be62 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2343.632649] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-22dd17ee-e645-48d6-81bf-2ad2c631be62 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg cfbe56c7e4a3464495625b83eab96d9d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2343.641991] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cfbe56c7e4a3464495625b83eab96d9d [ 2343.670551] env[62740]: DEBUG nova.network.neutron [None req-22dd17ee-e645-48d6-81bf-2ad2c631be62 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Instance cache missing network info. 
[ 2343.717130] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 2343.717317] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Creating directory with path [datastore2] vmware_temp/7f233efc-293c-4f52-a69f-ad0e67ac646a/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2343.717540] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-21fca0e6-5e0c-42db-9111-2c836476e20a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2343.728580] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Created directory with path [datastore2] vmware_temp/7f233efc-293c-4f52-a69f-ad0e67ac646a/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2343.728776] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Fetch image to [datastore2] vmware_temp/7f233efc-293c-4f52-a69f-ad0e67ac646a/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 2343.728991] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/7f233efc-293c-4f52-a69f-ad0e67ac646a/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 2343.729844] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b54695ee-64d1-454c-b7e8-70f579e6b146 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2343.738413] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27987703-cbeb-4c1b-9602-349021c72075 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2343.748384] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ec1041f-efbc-432e-b769-2ccb4333717e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2343.781379] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d07e2297-3b32-4903-9ac4-10cc61c63020 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2343.787199] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d24fb7e4-2b03-49a9-8529-d15484469455 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2343.809624] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 2343.812421] env[62740]: DEBUG nova.network.neutron [None req-22dd17ee-e645-48d6-81bf-2ad2c631be62 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2343.812928] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-22dd17ee-e645-48d6-81bf-2ad2c631be62 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 5b098968d4eb41bbb195f2cb45e59ec7 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 2343.821918] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5b098968d4eb41bbb195f2cb45e59ec7
[ 2343.822526] env[62740]: DEBUG oslo_concurrency.lockutils [None req-22dd17ee-e645-48d6-81bf-2ad2c631be62 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Releasing lock "refresh_cache-4ee71d81-7d8b-42f8-a27c-b4645169fa3e" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2343.822985] env[62740]: DEBUG nova.compute.manager [None req-22dd17ee-e645-48d6-81bf-2ad2c631be62 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 2343.823236] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-22dd17ee-e645-48d6-81bf-2ad2c631be62 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 2343.826501] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f865181b-95b4-4d30-94e9-9ff8a8e5a4c9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2343.828315] env[62740]: DEBUG oslo_vmware.api [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Task: {'id': task-640378, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.096884} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2343.830496] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 2343.830683] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 2343.830856] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 2343.831038] env[62740]: INFO nova.compute.manager [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Took 0.66 seconds to destroy the instance on the hypervisor.
[ 2343.833484] env[62740]: DEBUG nova.compute.claims [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 2343.833719] env[62740]: DEBUG oslo_concurrency.lockutils [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2343.833908] env[62740]: DEBUG oslo_concurrency.lockutils [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2343.835928] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg 966605107e1c49afb589eaf4adf65b96 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 2343.843764] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73d962d0-507a-4fc0-80bc-0494fb15cbe9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2343.872275] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-22dd17ee-e645-48d6-81bf-2ad2c631be62 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4ee71d81-7d8b-42f8-a27c-b4645169fa3e could not be found.
[ 2343.872501] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-22dd17ee-e645-48d6-81bf-2ad2c631be62 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 2343.872683] env[62740]: INFO nova.compute.manager [None req-22dd17ee-e645-48d6-81bf-2ad2c631be62 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Took 0.05 seconds to destroy the instance on the hypervisor.
[ 2343.872943] env[62740]: DEBUG oslo.service.loopingcall [None req-22dd17ee-e645-48d6-81bf-2ad2c631be62 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2343.873973] env[62740]: DEBUG oslo_vmware.rw_handles [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7f233efc-293c-4f52-a69f-ad0e67ac646a/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 2343.875484] env[62740]: DEBUG nova.compute.manager [-] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 2343.875623] env[62740]: DEBUG nova.network.neutron [-] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 2343.930967] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 966605107e1c49afb589eaf4adf65b96
[ 2343.931934] env[62740]: DEBUG nova.network.neutron [-] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 2343.932443] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg dc6f4a5c93a54f819e7ef8962cbc36a3 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 2343.938223] env[62740]: DEBUG oslo_vmware.rw_handles [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Completed reading data from the image iterator. {{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 2343.938334] env[62740]: DEBUG oslo_vmware.rw_handles [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7f233efc-293c-4f52-a69f-ad0e67ac646a/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
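The rw_handles lines above trace a streaming image upload: an HTTP write connection is opened against the ESX host's /folder datastore URL, the Glance image iterator is drained into it, and the handle is closed. Illustrative sketch only: oslo.vmware uses its own handle classes and cookie-authenticated session; the requests call below is a stand-in for what the write handle does in spirit, not the real API:

    # Sketch (assumed, simplified): stream image chunks to a datastore
    # "/folder" URL the way the oslo.vmware write handle does.
    import requests

    # URL elided; the full one appears in the log line above.
    URL = "https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/.../tmp-sparse.vmdk"
    PARAMS = {"dcPath": "ha-datacenter", "dsName": "datastore2"}

    def upload(image_chunks, size):
        # image_chunks: any iterable of bytes (the "image iterator").
        # Authentication cookies/tickets are deliberately omitted here.
        headers = {"Content-Length": str(size)}
        resp = requests.put(URL, params=PARAMS, data=image_chunks,
                            headers=headers)
        resp.raise_for_status()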
[ 2343.938998] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dc6f4a5c93a54f819e7ef8962cbc36a3
[ 2343.939329] env[62740]: DEBUG nova.network.neutron [-] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2343.939689] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 93c8e237862b4644b3492f57b8f87966 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 2343.947926] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 93c8e237862b4644b3492f57b8f87966
[ 2343.948415] env[62740]: INFO nova.compute.manager [-] [instance: 4ee71d81-7d8b-42f8-a27c-b4645169fa3e] Took 0.07 seconds to deallocate network for instance.
[ 2343.951979] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-22dd17ee-e645-48d6-81bf-2ad2c631be62 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 26073fce3a6e4c20a9405e1affff4df5 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 2343.978387] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 26073fce3a6e4c20a9405e1affff4df5
[ 2343.991656] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-22dd17ee-e645-48d6-81bf-2ad2c631be62 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg 8e2ce3e7d0954303a4f4901472e2fdaa in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 2344.029412] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e2ce3e7d0954303a4f4901472e2fdaa
[ 2344.033311] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d8bb5fe-d569-4f67-89a9-4583cecc2d85 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2344.036359] env[62740]: DEBUG oslo_concurrency.lockutils [None req-22dd17ee-e645-48d6-81bf-2ad2c631be62 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Lock "4ee71d81-7d8b-42f8-a27c-b4645169fa3e" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.410s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2344.036697] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-22dd17ee-e645-48d6-81bf-2ad2c631be62 tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Expecting reply to msg b86bf8c4904d4368af74abe5de0c3ef7 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 2344.042533] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b69b019-5d00-466f-be3f-3729174289cd {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2344.047070] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b86bf8c4904d4368af74abe5de0c3ef7
[ 2344.072449] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a473d468-dde1-4367-aae8-4b6a69f0dc94 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2344.079843] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eedd183-c27d-442f-be7e-bc8dcfd0f23b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2344.093194] env[62740]: DEBUG nova.compute.provider_tree [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2344.093653] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg bacb547ca88848748d57572509b51aab in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 2344.100351] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bacb547ca88848748d57572509b51aab
[ 2344.101178] env[62740]: DEBUG nova.scheduler.client.report [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 2344.103351] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg ba1154aeaf7043d591ad876e38501360 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 2344.115277] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ba1154aeaf7043d591ad876e38501360
[ 2344.115972] env[62740]: DEBUG oslo_concurrency.lockutils [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.282s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
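The inventory dict in the scheduler report line above is what placement uses to size this provider. The usable headroom per resource class follows the standard placement arithmetic, effective = (total - reserved) * allocation_ratio, with min_unit/max_unit/step_size constraining individual allocations. A small self-contained check of the logged numbers:

    # Sketch: recompute the effective capacity behind the inventory
    # record above. Values are copied from the log line.
    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        effective = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(rc, effective)
    # -> VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0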
[ 2344.116683] env[62740]: ERROR nova.compute.manager [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image 174f7655-3fb8-458a-8e9c-108936afe738.
[ 2344.116683] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Traceback (most recent call last):
[ 2344.116683] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/nova/nova/image/glance.py", line 285, in show
[ 2344.116683] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     image = self._client.call(context, 2, 'get', args=(image_id,))
[ 2344.116683] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/nova/nova/image/glance.py", line 191, in call
[ 2344.116683] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     result = getattr(controller, method)(*args, **kwargs)
[ 2344.116683] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get
[ 2344.116683] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     return self._get(image_id)
[ 2344.116683] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner
[ 2344.116683] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     return RequestIdProxy(wrapped(*args, **kwargs))
[ 2344.116683] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get
[ 2344.116683] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     resp, body = self.http_client.get(url, headers=header)
[ 2344.116683] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get
[ 2344.116683] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     return self.request(url, 'GET', **kwargs)
[ 2344.116683] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request
[ 2344.116683] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     return self._handle_response(resp)
[ 2344.116683] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response
[ 2344.116683] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     raise exc.from_response(resp, resp.content)
[ 2344.116683] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required.
[ 2344.116683] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]
[ 2344.116683] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] During handling of the above exception, another exception occurred:
[ 2344.116683] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]
[ 2344.116683] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Traceback (most recent call last):
[ 2344.116683] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 2344.116683] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     self.driver.spawn(context, instance, image_meta,
[ 2344.116683] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2344.116683] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2344.116683] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2344.116683] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     self._fetch_image_if_missing(context, vi)
[ 2344.116683] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing
[ 2344.116683] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     image_fetch(context, vi, tmp_image_ds_loc)
[ 2344.116683] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file
[ 2344.116683] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     images.fetch_image(
[ 2344.116683] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image
[ 2344.116683] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     metadata = IMAGE_API.get(context, image_ref)
[ 2344.116683] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/nova/nova/image/glance.py", line 1206, in get
[ 2344.116683] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     return session.show(context, image_id,
[ 2344.117777] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/nova/nova/image/glance.py", line 287, in show
[ 2344.117777] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     _reraise_translated_image_exception(image_id)
[ 2344.117777] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception
[ 2344.117777] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     raise new_exc.with_traceback(exc_trace)
[ 2344.117777] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/nova/nova/image/glance.py", line 285, in show
[ 2344.117777] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     image = self._client.call(context, 2, 'get', args=(image_id,))
[ 2344.117777] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/nova/nova/image/glance.py", line 191, in call
[ 2344.117777] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     result = getattr(controller, method)(*args, **kwargs)
[ 2344.117777] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get
[ 2344.117777] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     return self._get(image_id)
[ 2344.117777] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner
[ 2344.117777] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     return RequestIdProxy(wrapped(*args, **kwargs))
[ 2344.117777] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get
[ 2344.117777] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     resp, body = self.http_client.get(url, headers=header)
[ 2344.117777] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get
[ 2344.117777] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     return self.request(url, 'GET', **kwargs)
[ 2344.117777] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request
[ 2344.117777] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     return self._handle_response(resp)
[ 2344.117777] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response
[ 2344.117777] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     raise exc.from_response(resp, resp.content)
[ 2344.117777] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] nova.exception.ImageNotAuthorized: Not authorized for image 174f7655-3fb8-458a-8e9c-108936afe738.
[ 2344.117777] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]
[ 2344.117777] env[62740]: DEBUG nova.compute.utils [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Not authorized for image 174f7655-3fb8-458a-8e9c-108936afe738. {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
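The chained traceback above shows Nova's image-exception translation at work: glance.py catches the raw glanceclient.exc.HTTPUnauthorized and re-raises it as nova.exception.ImageNotAuthorized while keeping the original frames (the raise new_exc.with_traceback(exc_trace) line). A minimal, self-contained sketch of that idiom; the exception classes here are stand-ins, not Nova's real ones:

    # Sketch of the _reraise_translated_image_exception idiom visible
    # in the traceback above (stand-in exception types).
    import sys

    class HTTPUnauthorized(Exception):   # stand-in for glanceclient.exc
        pass

    class ImageNotAuthorized(Exception):  # stand-in for nova.exception
        pass

    def translate_image_exception(image_id, exc_value):
        # Map a client-level 401 to the service-level exception type.
        if isinstance(exc_value, HTTPUnauthorized):
            return ImageNotAuthorized(f"Not authorized for image {image_id}.")
        return exc_value

    def show(image_id):
        try:
            raise HTTPUnauthorized("HTTP 401 Unauthorized")
        except Exception:
            exc_type, exc_value, exc_trace = sys.exc_info()
            new_exc = translate_image_exception(image_id, exc_value)
            # Re-raise the translated exception with the original traceback,
            # which is why both exceptions appear chained in the log.
            raise new_exc.with_traceback(exc_trace)

Calling show("174f7655-3fb8-458a-8e9c-108936afe738") reproduces the shape of the log output: an ImageNotAuthorized whose traceback still points into the HTTP client frames.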
[ 2344.118686] env[62740]: DEBUG nova.compute.manager [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Build of instance b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6 was re-scheduled: Not authorized for image 174f7655-3fb8-458a-8e9c-108936afe738. {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 2344.119196] env[62740]: DEBUG nova.compute.manager [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 2344.119374] env[62740]: DEBUG nova.compute.manager [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 2344.119540] env[62740]: DEBUG nova.compute.manager [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 2344.119705] env[62740]: DEBUG nova.network.neutron [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 2344.210479] env[62740]: DEBUG neutronclient.v2_0.client [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=62740) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}}
[ 2344.212378] env[62740]: ERROR nova.compute.manager [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized.
[ 2344.212378] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Traceback (most recent call last):
[ 2344.212378] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/nova/nova/image/glance.py", line 285, in show
[ 2344.212378] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     image = self._client.call(context, 2, 'get', args=(image_id,))
[ 2344.212378] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/nova/nova/image/glance.py", line 191, in call
[ 2344.212378] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     result = getattr(controller, method)(*args, **kwargs)
[ 2344.212378] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get
[ 2344.212378] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     return self._get(image_id)
[ 2344.212378] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner
[ 2344.212378] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     return RequestIdProxy(wrapped(*args, **kwargs))
[ 2344.212378] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get
[ 2344.212378] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     resp, body = self.http_client.get(url, headers=header)
[ 2344.212378] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get
[ 2344.212378] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     return self.request(url, 'GET', **kwargs)
[ 2344.212378] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request
[ 2344.212378] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     return self._handle_response(resp)
[ 2344.212378] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response
[ 2344.212378] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     raise exc.from_response(resp, resp.content)
[ 2344.212378] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required.
[ 2344.212378] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]
[ 2344.212378] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] During handling of the above exception, another exception occurred:
[ 2344.212378] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]
[ 2344.212378] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Traceback (most recent call last):
[ 2344.212378] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 2344.212378] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     self.driver.spawn(context, instance, image_meta,
[ 2344.212378] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2344.212378] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2344.212378] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2344.212378] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     self._fetch_image_if_missing(context, vi)
[ 2344.212378] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing
[ 2344.212378] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     image_fetch(context, vi, tmp_image_ds_loc)
[ 2344.212378] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file
[ 2344.212378] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     images.fetch_image(
[ 2344.212378] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image
[ 2344.212378] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     metadata = IMAGE_API.get(context, image_ref)
[ 2344.212378] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/nova/nova/image/glance.py", line 1206, in get
[ 2344.212378] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     return session.show(context, image_id,
[ 2344.213425] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/nova/nova/image/glance.py", line 287, in show
[ 2344.213425] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     _reraise_translated_image_exception(image_id)
[ 2344.213425] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception
[ 2344.213425] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     raise new_exc.with_traceback(exc_trace)
[ 2344.213425] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/nova/nova/image/glance.py", line 285, in show
[ 2344.213425] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     image = self._client.call(context, 2, 'get', args=(image_id,))
[ 2344.213425] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/nova/nova/image/glance.py", line 191, in call
[ 2344.213425] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     result = getattr(controller, method)(*args, **kwargs)
[ 2344.213425] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get
[ 2344.213425] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     return self._get(image_id)
[ 2344.213425] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner
[ 2344.213425] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     return RequestIdProxy(wrapped(*args, **kwargs))
[ 2344.213425] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get
[ 2344.213425] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     resp, body = self.http_client.get(url, headers=header)
[ 2344.213425] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get
[ 2344.213425] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     return self.request(url, 'GET', **kwargs)
[ 2344.213425] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request
[ 2344.213425] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     return self._handle_response(resp)
[ 2344.213425] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response
[ 2344.213425] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     raise exc.from_response(resp, resp.content)
[ 2344.213425] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] nova.exception.ImageNotAuthorized: Not authorized for image 174f7655-3fb8-458a-8e9c-108936afe738.
[ 2344.213425] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]
[ 2344.213425] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] During handling of the above exception, another exception occurred:
[ 2344.213425] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]
[ 2344.213425] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Traceback (most recent call last):
[ 2344.213425] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/nova/nova/compute/manager.py", line 2447, in _do_build_and_run_instance
[ 2344.213425] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     self._build_and_run_instance(context, instance, image,
[ 2344.213425] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/nova/nova/compute/manager.py", line 2739, in _build_and_run_instance
[ 2344.213425] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     raise exception.RescheduledException(
[ 2344.213425] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] nova.exception.RescheduledException: Build of instance b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6 was re-scheduled: Not authorized for image 174f7655-3fb8-458a-8e9c-108936afe738.
[ 2344.213425] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]
[ 2344.213425] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] During handling of the above exception, another exception occurred:
[ 2344.213425] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]
[ 2344.213425] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Traceback (most recent call last):
[ 2344.213425] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 2344.213425] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     ret = obj(*args, **kwargs)
[ 2344.213425] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response
[ 2344.213425] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     exception_handler_v20(status_code, error_body)
[ 2344.213425] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20
[ 2344.214722] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     raise client_exc(message=error_message,
[ 2344.214722] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}}
[ 2344.214722] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Neutron server returns request_ids: ['req-8e5ee606-db26-416a-92d2-64ba451932fa']
[ 2344.214722] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]
[ 2344.214722] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] During handling of the above exception, another exception occurred:
[ 2344.214722] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]
[ 2344.214722] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Traceback (most recent call last):
[ 2344.214722] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/nova/nova/compute/manager.py", line 3036, in _cleanup_allocated_networks
[ 2344.214722] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     self._deallocate_network(context, instance, requested_networks)
[ 2344.214722] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network
[ 2344.214722] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     self.network_api.deallocate_for_instance(
[ 2344.214722] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance
[ 2344.214722] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     data = neutron.list_ports(**search_opts)
[ 2344.214722] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 2344.214722] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     ret = obj(*args, **kwargs)
[ 2344.214722] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports
[ 2344.214722] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     return self.list('ports', self.ports_path, retrieve_all,
[ 2344.214722] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 2344.214722] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     ret = obj(*args, **kwargs)
[ 2344.214722] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list
[ 2344.214722] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     for r in self._pagination(collection, path, **params):
[ 2344.214722] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination
[ 2344.214722] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     res = self.get(path, params=params)
[ 2344.214722] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 2344.214722] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     ret = obj(*args, **kwargs)
[ 2344.214722] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get
[ 2344.214722] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     return self.retry_request("GET", action, body=body,
[ 2344.214722] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 2344.214722] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     ret = obj(*args, **kwargs)
[ 2344.214722] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request
[ 2344.214722] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     return self.do_request(method, action, body=body,
[ 2344.214722] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 2344.214722] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     ret = obj(*args, **kwargs)
[ 2344.214722] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request
[ 2344.214722] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     self._handle_fault_response(status_code, replybody, resp)
[ 2344.214722] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]   File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper
[ 2344.214722] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]     raise exception.Unauthorized()
[ 2344.214722] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] nova.exception.Unauthorized: Not authorized.
[ 2344.214722] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6]
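Every other frame in the traceback above passes through the wrapper at nova/network/neutron.py:196, which converts a neutronclient 401 into one of Nova's own exceptions: exception.Unauthorized here (the user token failed), or NeutronAdminCredentialConfigurationInvalid when the admin client's credentials are the ones being rejected. A rough sketch of such a translation wrapper with stand-in exception types; the real implementation differs in detail:

    # Sketch (assumed, simplified) of the 401-translation wrapper
    # visible at neutron.py:196/204/212 in the tracebacks.
    import functools

    class Unauthorized(Exception):
        """Stand-in for nova.exception.Unauthorized."""

    class NeutronAdminCredentialConfigurationInvalid(Exception):
        """Stand-in for the nova.exception class of the same name."""

    class NeutronClientUnauthorized(Exception):
        """Stand-in for neutronclient.common.exceptions.Unauthorized."""

    def translate_neutron_errors(is_admin_client):
        def decorator(func):
            @functools.wraps(func)
            def wrapper(*args, **kwargs):
                try:
                    return func(*args, **kwargs)
                except NeutronClientUnauthorized:
                    # A 401 on the admin client points at bad [neutron]
                    # credentials in nova.conf, not at the user's token.
                    if is_admin_client:
                        raise NeutronAdminCredentialConfigurationInvalid()
                    raise Unauthorized()
            return wrapper
        return decorator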
[ 2344.215838] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg 0efc0dd8a6ba46b389a0b42dc5065ff6 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 2344.246476] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0efc0dd8a6ba46b389a0b42dc5065ff6
[ 2344.267547] env[62740]: INFO nova.scheduler.client.report [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Deleted allocations for instance b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6
[ 2344.273918] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg a985959ff1b4463b80a9148e9b0d5140 in queue reply_30cb6e3d754a4ebf9cedab7950709402
[ 2344.284611] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a985959ff1b4463b80a9148e9b0d5140
[ 2344.285555] env[62740]: DEBUG oslo_concurrency.lockutils [None req-12153b06-86fc-41be-9a04-9a1c75be1a02 tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Lock "b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 575.957s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2344.285555] env[62740]: DEBUG oslo_concurrency.lockutils [None req-b95bda5f-b33d-472d-b90b-e97e5e39634f tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Lock "b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 381.011s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2344.285761] env[62740]: DEBUG oslo_concurrency.lockutils [None req-b95bda5f-b33d-472d-b90b-e97e5e39634f tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Acquiring lock "b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2344.285967] env[62740]: DEBUG oslo_concurrency.lockutils [None req-b95bda5f-b33d-472d-b90b-e97e5e39634f tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Lock "b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2344.286165] env[62740]: DEBUG oslo_concurrency.lockutils [None req-b95bda5f-b33d-472d-b90b-e97e5e39634f tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Lock "b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2344.288697] env[62740]: INFO nova.compute.manager [None req-b95bda5f-b33d-472d-b90b-e97e5e39634f tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Terminating instance
[ 2344.290448] env[62740]: DEBUG nova.compute.manager [None req-b95bda5f-b33d-472d-b90b-e97e5e39634f tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 2344.290640] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-b95bda5f-b33d-472d-b90b-e97e5e39634f tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 2344.291112] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8a16d580-08ca-4ff0-85c5-d1edeaf631ed {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2344.299540] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80370726-ad1b-4f6a-8704-4a3af70eebb2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2344.324888] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-b95bda5f-b33d-472d-b90b-e97e5e39634f tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6 could not be found.
[ 2344.325049] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-b95bda5f-b33d-472d-b90b-e97e5e39634f tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 2344.325278] env[62740]: INFO nova.compute.manager [None req-b95bda5f-b33d-472d-b90b-e97e5e39634f tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Took 0.03 seconds to destroy the instance on the hypervisor.
[ 2344.325486] env[62740]: DEBUG oslo.service.loopingcall [None req-b95bda5f-b33d-472d-b90b-e97e5e39634f tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2344.325694] env[62740]: DEBUG nova.compute.manager [-] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 2344.325793] env[62740]: DEBUG nova.network.neutron [-] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 2344.402062] env[62740]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=62740) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}}
[ 2344.402337] env[62740]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}}
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall Traceback (most recent call last):
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall   File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall     ret = obj(*args, **kwargs)
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall   File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall     exception_handler_v20(status_code, error_body)
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall   File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall     raise client_exc(message=error_message,
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}}
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-3ffa3bd1-2998-4d38-996d-68ce16aad2b7']
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred:
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall Traceback (most recent call last):
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall     result = func(*self.args, **self.kw)
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall     result = f(*args, **kwargs)
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall   File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall     self._deallocate_network(
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall   File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall     self.network_api.deallocate_for_instance(
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall   File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall     data = neutron.list_ports(**search_opts)
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall   File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall     ret = obj(*args, **kwargs)
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall   File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall     return self.list('ports', self.ports_path, retrieve_all,
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall   File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall     ret = obj(*args, **kwargs)
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall   File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall     for r in self._pagination(collection, path, **params):
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall   File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall     res = self.get(path, params=params)
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall   File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall     ret = obj(*args, **kwargs)
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall   File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall     return self.retry_request("GET", action, body=body,
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall   File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall     ret = obj(*args, **kwargs)
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall   File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall     return self.do_request(method, action, body=body,
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall   File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall     ret = obj(*args, **kwargs)
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall   File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall     self._handle_fault_response(status_code, replybody, resp)
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall   File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall     raise exception.NeutronAdminCredentialConfigurationInvalid()
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
[ 2344.402835] env[62740]: ERROR oslo.service.loopingcall
"/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2344.402835] env[62740]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2344.402835] env[62740]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2344.402835] env[62740]: ERROR oslo.service.loopingcall [ 2344.404240] env[62740]: ERROR nova.compute.manager [None req-b95bda5f-b33d-472d-b90b-e97e5e39634f tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2344.405035] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-b95bda5f-b33d-472d-b90b-e97e5e39634f tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg 88e82f28e0214676a620f04757267207 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2344.433971] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 88e82f28e0214676a620f04757267207 [ 2344.435703] env[62740]: ERROR nova.compute.manager [None req-b95bda5f-b33d-472d-b90b-e97e5e39634f tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2344.435703] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Traceback (most recent call last): [ 2344.435703] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2344.435703] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] ret = obj(*args, **kwargs) [ 2344.435703] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2344.435703] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] exception_handler_v20(status_code, error_body) [ 2344.435703] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2344.435703] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] raise client_exc(message=error_message, [ 2344.435703] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2344.435703] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Neutron server returns request_ids: ['req-3ffa3bd1-2998-4d38-996d-68ce16aad2b7'] [ 2344.435703] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] [ 2344.435703] env[62740]: ERROR nova.compute.manager [instance: 
b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] During handling of the above exception, another exception occurred: [ 2344.435703] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] [ 2344.435703] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Traceback (most recent call last): [ 2344.435703] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 2344.435703] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] self._delete_instance(context, instance, bdms) [ 2344.435703] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 2344.435703] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] self._shutdown_instance(context, instance, bdms) [ 2344.435703] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 2344.435703] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] self._try_deallocate_network(context, instance, requested_networks) [ 2344.435703] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 2344.435703] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] with excutils.save_and_reraise_exception(): [ 2344.435703] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2344.435703] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] self.force_reraise() [ 2344.435703] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2344.435703] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] raise self.value [ 2344.435703] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 2344.435703] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] _deallocate_network_with_retries() [ 2344.435703] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2344.435703] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] return evt.wait() [ 2344.435703] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2344.435703] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] result = hub.switch() [ 2344.435703] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
2344.435703] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] return self.greenlet.switch() [ 2344.435703] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2344.435703] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] result = func(*self.args, **self.kw) [ 2344.435703] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2344.436842] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] result = f(*args, **kwargs) [ 2344.436842] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 2344.436842] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] self._deallocate_network( [ 2344.436842] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 2344.436842] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] self.network_api.deallocate_for_instance( [ 2344.436842] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2344.436842] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] data = neutron.list_ports(**search_opts) [ 2344.436842] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2344.436842] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] ret = obj(*args, **kwargs) [ 2344.436842] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2344.436842] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] return self.list('ports', self.ports_path, retrieve_all, [ 2344.436842] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2344.436842] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] ret = obj(*args, **kwargs) [ 2344.436842] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2344.436842] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] for r in self._pagination(collection, path, **params): [ 2344.436842] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2344.436842] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] res = self.get(path, params=params) [ 2344.436842] env[62740]: ERROR 
nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2344.436842] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] ret = obj(*args, **kwargs) [ 2344.436842] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2344.436842] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] return self.retry_request("GET", action, body=body, [ 2344.436842] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2344.436842] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] ret = obj(*args, **kwargs) [ 2344.436842] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2344.436842] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] return self.do_request(method, action, body=body, [ 2344.436842] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2344.436842] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] ret = obj(*args, **kwargs) [ 2344.436842] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2344.436842] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] self._handle_fault_response(status_code, replybody, resp) [ 2344.436842] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2344.436842] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2344.436842] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
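Annotation on the failure above (the same traceback is emitted once more by oslo_messaging.rpc.server just below): the root cause is the 401 returned to neutronclient during deallocate_for_instance(). The wrapper at nova/network/neutron.py (line 212 in the frames above) converts neutronclient's Unauthorized into nova.exception.NeutronAdminCredentialConfigurationInvalid, which is why the instance lands in vm_state ERROR with a configuration message rather than a raw HTTP error, and why the log advises verifying the Neutron admin credentials in nova.conf. A minimal sketch of that translation pattern follows; the exception classes and decorator here are illustrative stand-ins, not Nova's actual code.

import functools

class Unauthorized(Exception):
    """Stand-in for neutronclient.common.exceptions.Unauthorized."""

class NeutronAdminCredentialConfigurationInvalid(Exception):
    """Stand-in for the nova.exception class raised in the traceback."""

def translate_unauthorized(func):
    # Re-raise authentication failures as a configuration error so the
    # operator is pointed at the [neutron] credentials in nova.conf
    # instead of a bare 401.
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Unauthorized as exc:
            raise NeutronAdminCredentialConfigurationInvalid() from exc
    return wrapper

@translate_unauthorized
def list_ports(**search_opts):
    # Simulates the neutron.list_ports(**search_opts) call that received
    # the 401 in the log.
    raise Unauthorized("The request you have made requires authentication.")

Note also the 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' frame: network deallocation runs under a retry decorator, but that decorator only retries the exception types it was configured with, and an authentication failure is not transient, so the looping call fails on its first pass. To check the suspect credentials out of band, a keystoneauth1 session can be built from the same kind of values carried in the [neutron] section of nova.conf; every value below is a hypothetical placeholder, not taken from this deployment.

from keystoneauth1 import loading, session

# Load the 'password' auth plugin with placeholder options mirroring what
# nova.conf's [neutron] section would carry.
auth = loading.get_plugin_loader('password').load_from_options(
    auth_url='http://controller/identity',
    username='neutron-service-user',
    password='REDACTED',
    project_name='service',
    user_domain_name='Default',
    project_domain_name='Default',
)
sess = session.Session(auth=auth)
# get_token() raises keystoneauth1.exceptions.Unauthorized when the
# credentials are wrong, reproducing the 401 seen above without going
# through nova-compute.
print(sess.get_token())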
[ 2344.436842] env[62740]: ERROR nova.compute.manager [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] [ 2344.437811] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-b95bda5f-b33d-472d-b90b-e97e5e39634f tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg b78afcba6cf9427ab771fd2230f31220 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2344.461525] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b78afcba6cf9427ab771fd2230f31220 [ 2344.462604] env[62740]: DEBUG oslo_concurrency.lockutils [None req-b95bda5f-b33d-472d-b90b-e97e5e39634f tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Lock "b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.177s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2344.463191] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-b95bda5f-b33d-472d-b90b-e97e5e39634f tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg b8cdbe5d2295482d9f4c26fe73ac2e53 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2344.474289] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b8cdbe5d2295482d9f4c26fe73ac2e53 [ 2344.475459] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-b95bda5f-b33d-472d-b90b-e97e5e39634f tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg d7a8962ed1bf41c284eaf9457d30958b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2344.491246] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d7a8962ed1bf41c284eaf9457d30958b [ 2344.493116] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-b95bda5f-b33d-472d-b90b-e97e5e39634f tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Expecting reply to msg 95903eb47dfc43faa19b52cdc144f777 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2344.519506] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 95903eb47dfc43faa19b52cdc144f777 [ 2344.520609] env[62740]: INFO nova.compute.manager [None req-b95bda5f-b33d-472d-b90b-e97e5e39634f tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] [instance: b51a6eb5-a52d-4a0c-9b9d-e52e96e492c6] Successfully reverted task state from None on failure for instance. [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server [None req-b95bda5f-b33d-472d-b90b-e97e5e39634f tempest-ServersTestMultiNic-1543791748 tempest-ServersTestMultiNic-1543791748-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-3ffa3bd1-2998-4d38-996d-68ce16aad2b7'] [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server raise self.value [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server raise self.value [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server raise self.value [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in terminate_instance [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3339, in do_terminate_instance [ 2344.524502] env[62740]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server raise self.value [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server raise self.value [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server return evt.wait() [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2344.526112] env[62740]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2344.526112] env[62740]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2344.527670] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2344.527670] env[62740]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 2344.527670] env[62740]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2344.527670] env[62740]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2344.527670] env[62740]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2344.527670] env[62740]: ERROR oslo_messaging.rpc.server [ 2347.890435] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2347.890762] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62740) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 2348.891250] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2348.891563] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2349.891311] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager.update_available_resource {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2349.891723] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 87fb6b4d16e44137af83cbb6dff37197 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2349.902937] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 87fb6b4d16e44137af83cbb6dff37197 [ 2349.904005] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2349.904242] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2349.904417] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2349.904576] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62740) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2349.906069] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd34af78-d15f-4972-bcb1-5df833b00b1b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2349.914705] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebfd0243-0323-411a-a4de-8b82ab5ad480 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2349.928858] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e00aac1-698a-49f7-85c7-7351c9a4e7ef {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2349.934972] env[62740]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0abb1873-bb50-446d-9bed-ce0d84cddb30 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2349.962945] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181683MB free_disk=90GB free_vcpus=48 pci_devices=None {{(pid=62740) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2349.963102] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2349.963295] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2349.964133] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg c7cb79ab9cac453ea50ccc111f55bc57 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2349.989357] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c7cb79ab9cac453ea50ccc111f55bc57 [ 2349.992102] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 4dfddf2e84094a0ea0516d0960f3c217 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2350.001377] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4dfddf2e84094a0ea0516d0960f3c217 [ 2350.107139] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 1b975b29-fbaa-4385-9bf9-33496b4ed129 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2350.107324] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance b75490e6-ded3-4aa7-89ff-f4963fe82cfe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2350.107458] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2350.107586] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 0f438d9b-394a-465c-97ae-8393bdc3e1cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2350.107707] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 2ad0d938-304b-4535-8362-099c3a6864f6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2350.107826] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance a925b4b2-7320-4c28-b083-c15adf060a00 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2350.108033] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2350.108181] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2350.186172] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-726ddbf9-9945-4a9d-9bae-ec79f7c3d99d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2350.193753] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f1ddfde-3f8a-4b24-b9c1-c60cdf0e70e6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2350.223680] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40a64c68-f53a-4fe2-ba43-e8f32ffbeb6f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2350.230637] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d055ee0-edeb-4a94-93af-391a7d06b734 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2350.243221] env[62740]: DEBUG nova.compute.provider_tree [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2350.243661] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 0e786f43e40444d996e99ad71bb52bca in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2350.250780] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0e786f43e40444d996e99ad71bb52bca [ 2350.251608] env[62740]: DEBUG nova.scheduler.client.report [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory 
data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2350.253797] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg e28a076154ad45e6a6180d354444db78 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2350.266314] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e28a076154ad45e6a6180d354444db78 [ 2350.266955] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62740) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2350.267147] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.304s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2351.262103] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2351.262388] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2351.262527] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Starting heal instance info cache {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 2351.262649] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Rebuilding the list of instances to heal {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 2351.263242] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 2b91ff92021246ec8b6152f230ea5b96 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2351.278955] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b91ff92021246ec8b6152f230ea5b96 [ 2351.280698] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2351.280847] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Skipping network cache update for instance because it is Building. 
{{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2351.280981] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2351.281126] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 0f438d9b-394a-465c-97ae-8393bdc3e1cd] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2351.281253] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 2ad0d938-304b-4535-8362-099c3a6864f6] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2351.281463] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: a925b4b2-7320-4c28-b083-c15adf060a00] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2351.281529] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Didn't find any instances for network info cache update. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 2352.890854] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2353.886579] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2353.887310] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 81cf1c32e39149a19460eea05a03355f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2353.901308] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 81cf1c32e39149a19460eea05a03355f [ 2353.905019] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2355.891293] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2358.891476] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2358.891745] env[62740]: DEBUG nova.compute.manager [None 
req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Cleaning up deleted instances with incomplete migration {{(pid=62740) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11255}} [ 2358.891902] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 7631753bf3a34e469351c5f9fbff7feb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2358.902130] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7631753bf3a34e469351c5f9fbff7feb [ 2359.903019] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2359.903357] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Cleaning up deleted instances {{(pid=62740) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11217}} [ 2359.903812] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 7501827495ee4bea8165cb416fe79a04 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2359.912980] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7501827495ee4bea8165cb416fe79a04 [ 2359.913516] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] There are 0 instances to clean {{(pid=62740) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11226}} [ 2361.445298] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-2217d49f-a399-4c46-a5d9-376b319659c6 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Expecting reply to msg d31745203e5045ea82a3a6074e077f7f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2361.456748] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d31745203e5045ea82a3a6074e077f7f [ 2361.457212] env[62740]: DEBUG oslo_concurrency.lockutils [None req-2217d49f-a399-4c46-a5d9-376b319659c6 tempest-InstanceActionsTestJSON-779540905 tempest-InstanceActionsTestJSON-779540905-project-member] Acquiring lock "2ad0d938-304b-4535-8362-099c3a6864f6" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2367.890855] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2367.891344] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 2ec386bed19b4855be9a8a753d9d9321 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2367.898734] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ec386bed19b4855be9a8a753d9d9321 [ 2382.568035] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=62740) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2382.568427] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Getting list of instances from cluster (obj){ [ 2382.568427] env[62740]: value = "domain-c8" [ 2382.568427] env[62740]: _type = "ClusterComputeResource" [ 2382.568427] env[62740]: } {{(pid=62740) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2382.569505] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b4cd6d5-a9b2-4693-8e5a-700496d57330 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.583541] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Got total of 6 instances {{(pid=62740) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2382.584178] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 980605701096473bbb202884797d4300 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2382.600505] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 980605701096473bbb202884797d4300 [ 2389.902021] env[62740]: WARNING oslo_vmware.rw_handles [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2389.902021] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2389.902021] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2389.902021] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2389.902021] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2389.902021] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 2389.902021] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2389.902021] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2389.902021] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2389.902021] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2389.902021] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2389.902021] env[62740]: ERROR oslo_vmware.rw_handles [ 2389.902021] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/7f233efc-293c-4f52-a69f-ad0e67ac646a/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2389.903512] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 
[ 2389.903512] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2389.903920] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Copying Virtual Disk [datastore2] vmware_temp/7f233efc-293c-4f52-a69f-ad0e67ac646a/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore2] vmware_temp/7f233efc-293c-4f52-a69f-ad0e67ac646a/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2389.904393] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-854ea291-c53f-4f14-8b44-17a15efe778f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2389.913101] env[62740]: DEBUG oslo_vmware.api [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Waiting for the task: (returnval){ [ 2389.913101] env[62740]: value = "task-640379" [ 2389.913101] env[62740]: _type = "Task" [ 2389.913101] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2389.922116] env[62740]: DEBUG oslo_vmware.api [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Task: {'id': task-640379, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2390.424057] env[62740]: DEBUG oslo_vmware.exceptions [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Fault InvalidArgument not matched.
{{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2390.424057] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2390.424540] env[62740]: ERROR nova.compute.manager [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2390.424540] env[62740]: Faults: ['InvalidArgument'] [ 2390.424540] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Traceback (most recent call last): [ 2390.424540] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2390.424540] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] yield resources [ 2390.424540] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2390.424540] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] self.driver.spawn(context, instance, image_meta, [ 2390.424540] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2390.424540] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2390.424540] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2390.424540] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] self._fetch_image_if_missing(context, vi) [ 2390.424540] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2390.424540] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] image_cache(vi, tmp_image_ds_loc) [ 2390.424540] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2390.424540] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] vm_util.copy_virtual_disk( [ 2390.424540] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2390.424540] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] session._wait_for_task(vmdk_copy_task) [ 2390.424540] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2390.424540] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] return self.wait_for_task(task_ref) [ 2390.424540] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2390.424540] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] return evt.wait() [ 2390.424540] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2390.424540] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] result = hub.switch() [ 2390.424540] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2390.424540] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] return self.greenlet.switch() [ 2390.424540] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2390.424540] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] self.f(*self.args, **self.kw) [ 2390.424540] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2390.424540] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] raise exceptions.translate_fault(task_info.error) [ 2390.424540] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2390.424540] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Faults: ['InvalidArgument'] [ 2390.424540] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] [ 2390.425441] env[62740]: INFO nova.compute.manager [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Terminating instance [ 2390.426464] env[62740]: DEBUG oslo_concurrency.lockutils [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2390.426674] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2390.426939] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f7bbd5e6-d462-40e1-9634-4d68f30869c4 {{(pid=62740) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2390.429192] env[62740]: DEBUG nova.compute.manager [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2390.429387] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2390.430166] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b67fabf7-88a8-4a0f-a921-483ea9b0df9e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2390.437357] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2390.438280] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2d1c06da-8e4a-4dd6-b92c-ed96910cda8e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2390.439655] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2390.439830] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2390.440533] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff4d358e-7ec3-4286-9c6b-4329c470d1aa {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2390.445511] env[62740]: DEBUG oslo_vmware.api [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Waiting for the task: (returnval){ [ 2390.445511] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52821677-30ce-f8ae-7ea1-8ffbe16ab4c6" [ 2390.445511] env[62740]: _type = "Task" [ 2390.445511] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2390.452625] env[62740]: DEBUG oslo_vmware.api [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52821677-30ce-f8ae-7ea1-8ffbe16ab4c6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2390.513468] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2390.513642] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2390.513776] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Deleting the datastore file [datastore2] 1b975b29-fbaa-4385-9bf9-33496b4ed129 {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2390.514043] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-79d05126-e3a8-4894-ab87-ae2ed937f0a1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2390.520484] env[62740]: DEBUG oslo_vmware.api [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Waiting for the task: (returnval){ [ 2390.520484] env[62740]: value = "task-640381" [ 2390.520484] env[62740]: _type = "Task" [ 2390.520484] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2390.528111] env[62740]: DEBUG oslo_vmware.api [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Task: {'id': task-640381, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2390.955696] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2390.956036] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Creating directory with path [datastore2] vmware_temp/a8b2c5e5-5200-479e-8b36-bad2a4252679/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2390.956249] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a2e7756e-f192-4e74-98fb-e91331321c16 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2390.966672] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Created directory with path [datastore2] vmware_temp/a8b2c5e5-5200-479e-8b36-bad2a4252679/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2390.966867] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Fetch image to [datastore2] vmware_temp/a8b2c5e5-5200-479e-8b36-bad2a4252679/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2390.967084] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/a8b2c5e5-5200-479e-8b36-bad2a4252679/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2390.967776] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-670f4dff-6825-4214-8689-45052b47f8e7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2390.975430] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43881740-d634-4354-95c4-9776957bb769 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2391.651053] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cd69822-0563-4599-aad6-6b820eed9d06 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2391.682855] env[62740]: DEBUG oslo_vmware.api [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Task: {'id': task-640381, 
'name': DeleteDatastoreFile_Task, 'duration_secs': 0.064478} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2391.683335] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2391.683522] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2391.683695] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2391.683865] env[62740]: INFO nova.compute.manager [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Took 1.25 seconds to destroy the instance on the hypervisor. [ 2391.685833] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e7258c2-f63f-4639-9157-b4b95b58ce74 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2391.688310] env[62740]: DEBUG nova.compute.claims [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2391.688479] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2391.688690] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2391.690571] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 856c8fe9459342d88ee4abf20be76c89 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2391.694563] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9d910822-fa44-4560-8f33-61f381f2f78c {{(pid=62740) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2391.717610] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2391.729752] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 856c8fe9459342d88ee4abf20be76c89 [ 2391.769160] env[62740]: DEBUG oslo_vmware.rw_handles [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a8b2c5e5-5200-479e-8b36-bad2a4252679/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2391.830670] env[62740]: DEBUG oslo_vmware.rw_handles [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Completed reading data from the image iterator. {{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2391.830974] env[62740]: DEBUG oslo_vmware.rw_handles [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a8b2c5e5-5200-479e-8b36-bad2a4252679/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2391.871077] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26989b33-e6b1-44ed-ad93-fca1c4156312 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2391.878178] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7adf8d6-1f7b-4593-bf2b-520ebe63d246 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2391.907928] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54f93e05-a944-425a-bbed-a28295f55720 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2391.914697] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a79e8862-d5a0-4846-b54d-2aea91fd4cdc {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2391.927184] env[62740]: DEBUG nova.compute.provider_tree [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2391.927669] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg f990d0cb9c204d6eb8ef093605a5c543 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2391.934775] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f990d0cb9c204d6eb8ef093605a5c543 [ 2391.935658] env[62740]: DEBUG nova.scheduler.client.report [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2391.937944] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 776887ca04db47c5b9fff3d7d593d1b8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2391.949882] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 776887ca04db47c5b9fff3d7d593d1b8 [ 2391.950690] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.262s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2391.951731] env[62740]: ERROR nova.compute.manager [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2391.951731] env[62740]: Faults: ['InvalidArgument'] [ 2391.951731] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Traceback (most recent call last): [ 2391.951731] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2391.951731] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] self.driver.spawn(context, instance, image_meta, [ 2391.951731] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2391.951731] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2391.951731] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2391.951731] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] self._fetch_image_if_missing(context, vi) [ 2391.951731] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2391.951731] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] image_cache(vi, tmp_image_ds_loc) [ 2391.951731] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2391.951731] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] vm_util.copy_virtual_disk( [ 2391.951731] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2391.951731] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] session._wait_for_task(vmdk_copy_task) [ 2391.951731] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2391.951731] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] return self.wait_for_task(task_ref) [ 2391.951731] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2391.951731] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] return evt.wait() [ 2391.951731] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2391.951731] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] result = hub.switch() [ 2391.951731] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2391.951731] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] return self.greenlet.switch() [ 2391.951731] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2391.951731] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] self.f(*self.args, **self.kw) [ 2391.951731] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2391.951731] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] raise exceptions.translate_fault(task_info.error) [ 2391.951731] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2391.951731] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Faults: ['InvalidArgument'] [ 2391.951731] env[62740]: ERROR nova.compute.manager [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] [ 2391.953193] env[62740]: DEBUG nova.compute.utils [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2391.953724] env[62740]: DEBUG nova.compute.manager [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Build of instance 1b975b29-fbaa-4385-9bf9-33496b4ed129 was re-scheduled: A specified parameter was not correct: fileType [ 2391.953724] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2391.954175] env[62740]: DEBUG nova.compute.manager [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2391.954285] env[62740]: DEBUG nova.compute.manager [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2391.954455] env[62740]: DEBUG nova.compute.manager [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2391.954615] env[62740]: DEBUG nova.network.neutron [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2392.309309] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg d92b2a1b49574a7cb3af40e11bf42042 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2392.320599] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d92b2a1b49574a7cb3af40e11bf42042 [ 2392.321180] env[62740]: DEBUG nova.network.neutron [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2392.321773] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg cca1e37734cd449a80073e2c19cfe66e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2392.333964] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cca1e37734cd449a80073e2c19cfe66e [ 2392.334569] env[62740]: INFO nova.compute.manager [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Took 0.38 seconds to deallocate network for instance. 
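The scheduler-report records in this stretch keep echoing the same provider inventory for d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0. The capacity placement derives from such a record is total minus reserved, scaled by allocation_ratio; that formula is placement's usual one, stated here as an assumption rather than taken from this log. A short worked check with the logged numbers:

    # Schedulable capacity implied by the inventory logged above:
    #   capacity = (total - reserved) * allocation_ratio
    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
    }
    for rc, inv in inventory.items():
        cap = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(rc, cap)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0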
[ 2392.336262] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 2fc72994fe624b7a96390c658cf858c9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2392.370020] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2fc72994fe624b7a96390c658cf858c9 [ 2392.372690] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg c55f3e2006264d6590d93c4eea53d6f0 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2392.403844] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c55f3e2006264d6590d93c4eea53d6f0 [ 2392.426874] env[62740]: INFO nova.scheduler.client.report [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Deleted allocations for instance 1b975b29-fbaa-4385-9bf9-33496b4ed129 [ 2392.432699] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 9f16380116554faaa92fc18f0f2d92ff in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2392.446662] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f16380116554faaa92fc18f0f2d92ff [ 2392.447207] env[62740]: DEBUG oslo_concurrency.lockutils [None req-e2409c78-811a-443a-98a9-98cfa3b36426 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "1b975b29-fbaa-4385-9bf9-33496b4ed129" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 568.879s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2392.447442] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8e64c5c1-7584-49b6-9d25-c0b1516429a3 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "1b975b29-fbaa-4385-9bf9-33496b4ed129" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 372.636s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2392.447662] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8e64c5c1-7584-49b6-9d25-c0b1516429a3 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Acquiring lock "1b975b29-fbaa-4385-9bf9-33496b4ed129-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2392.447865] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8e64c5c1-7584-49b6-9d25-c0b1516429a3 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "1b975b29-fbaa-4385-9bf9-33496b4ed129-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
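The Acquiring/acquired/released triples above are oslo.concurrency's lockutils logging around a synchronized callable; because Nova keys these locks on the instance UUID, do_terminate_instance waited 372.636s for the 568.879s build on 1b975b29-fbaa-4385-9bf9-33496b4ed129 to release the lock. A minimal sketch of the same serialization, assuming oslo.concurrency is installed; the function bodies are placeholders:

    # Two operations synchronized on the same instance UUID: the second
    # blocks until the first releases the lock, matching the "waited
    # 372.636s" line above. lockutils emits the Acquiring/acquired/released
    # DEBUG lines around each call.
    import time
    from oslo_concurrency import lockutils

    UUID = "1b975b29-fbaa-4385-9bf9-33496b4ed129"

    @lockutils.synchronized(UUID)
    def do_build():
        time.sleep(1)  # stands in for a long _locked_do_build_and_run_instance

    @lockutils.synchronized(UUID)
    def do_terminate_instance():
        print("terminating")  # runs only after do_build releases the lock

    do_build()
    do_terminate_instance()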
[ 2392.448045] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8e64c5c1-7584-49b6-9d25-c0b1516429a3 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "1b975b29-fbaa-4385-9bf9-33496b4ed129-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2392.449922] env[62740]: INFO nova.compute.manager [None req-8e64c5c1-7584-49b6-9d25-c0b1516429a3 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Terminating instance [ 2392.451626] env[62740]: DEBUG nova.compute.manager [None req-8e64c5c1-7584-49b6-9d25-c0b1516429a3 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2392.451822] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-8e64c5c1-7584-49b6-9d25-c0b1516429a3 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2392.452678] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b90aa647-30c3-4855-9c87-962216017cfe {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2392.461553] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f74a4a56-000f-4872-90ed-dfddbee4cbf3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2392.485816] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-8e64c5c1-7584-49b6-9d25-c0b1516429a3 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1b975b29-fbaa-4385-9bf9-33496b4ed129 could not be found. [ 2392.486020] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-8e64c5c1-7584-49b6-9d25-c0b1516429a3 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2392.486204] env[62740]: INFO nova.compute.manager [None req-8e64c5c1-7584-49b6-9d25-c0b1516429a3 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Took 0.03 seconds to destroy the instance on the hypervisor. [ 2392.486443] env[62740]: DEBUG oslo.service.loopingcall [None req-8e64c5c1-7584-49b6-9d25-c0b1516429a3 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return.
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2392.486650] env[62740]: DEBUG nova.compute.manager [-] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2392.486745] env[62740]: DEBUG nova.network.neutron [-] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2392.502333] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c77f1af938b44ede9c0be491933736aa in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2392.507547] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c77f1af938b44ede9c0be491933736aa [ 2392.507869] env[62740]: DEBUG nova.network.neutron [-] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2392.508250] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ee6ade7595004ae68ed331cc4d7d11fe in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2392.515371] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ee6ade7595004ae68ed331cc4d7d11fe [ 2392.515797] env[62740]: INFO nova.compute.manager [-] [instance: 1b975b29-fbaa-4385-9bf9-33496b4ed129] Took 0.03 seconds to deallocate network for instance. [ 2392.519097] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8e64c5c1-7584-49b6-9d25-c0b1516429a3 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 1901b8316836485ea7c9cc13d20c8cbf in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2392.542474] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1901b8316836485ea7c9cc13d20c8cbf [ 2392.557540] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8e64c5c1-7584-49b6-9d25-c0b1516429a3 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 1651c8f3d3f243d4a103e79f6ecf7533 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2392.595323] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1651c8f3d3f243d4a103e79f6ecf7533 [ 2392.598255] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8e64c5c1-7584-49b6-9d25-c0b1516429a3 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Lock "1b975b29-fbaa-4385-9bf9-33496b4ed129" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.151s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2392.598632] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8e64c5c1-7584-49b6-9d25-c0b1516429a3 tempest-ServerDiskConfigTestJSON-1958840202 tempest-ServerDiskConfigTestJSON-1958840202-project-member] Expecting reply to msg 8861abb1c44c4c8bb6ba849d885eca6f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2392.608772] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8861abb1c44c4c8bb6ba849d885eca6f [ 2403.447206] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 
tempest-ServersTestJSON-1077887089-project-member] Acquiring lock "e597c7da-cb9e-413f-9c8e-e41ffbb31338" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2403.447535] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Lock "e597c7da-cb9e-413f-9c8e-e41ffbb31338" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2403.447962] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 5b700c981ca74eba98c523f4a9f8ad64 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2403.458433] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5b700c981ca74eba98c523f4a9f8ad64 [ 2403.458865] env[62740]: DEBUG nova.compute.manager [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: e597c7da-cb9e-413f-9c8e-e41ffbb31338] Starting instance... {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2403.460660] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg b3de3e81430247bcb11607779d9b2afe in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2403.501064] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b3de3e81430247bcb11607779d9b2afe [ 2403.516871] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2403.517162] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2403.518615] env[62740]: INFO nova.compute.claims [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: e597c7da-cb9e-413f-9c8e-e41ffbb31338] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2403.520351] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg d9c8f0eef8e44c77800170c2296abe58 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2403.554533] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
d9c8f0eef8e44c77800170c2296abe58 [ 2403.556395] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 16436818a06f456080459ecb5fc61ba9 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2403.563222] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 16436818a06f456080459ecb5fc61ba9 [ 2403.578536] env[62740]: DEBUG nova.scheduler.client.report [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Refreshing inventories for resource provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 2403.591316] env[62740]: DEBUG nova.scheduler.client.report [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Updating ProviderTree inventory for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 2403.591562] env[62740]: DEBUG nova.compute.provider_tree [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Updating inventory in ProviderTree for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2403.602904] env[62740]: DEBUG nova.scheduler.client.report [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Refreshing aggregate associations for resource provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0, aggregates: None {{(pid=62740) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 2403.619523] env[62740]: DEBUG nova.scheduler.client.report [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Refreshing trait associations for resource provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62740) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 2403.693158] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a16f5f5-6d8f-46a5-b0f5-b7f2b82d44cc {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.700896] env[62740]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb29325f-62a6-46f6-95a3-ea7034c332c7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.729254] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57310a52-421c-4195-aa71-66e81c87ecc2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.735767] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76a7fc84-ba2d-4062-89eb-0657c3391358 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.749167] env[62740]: DEBUG nova.compute.provider_tree [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2403.749629] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 8b47704b000a4174a1e5a67811e2240a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2403.758965] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8b47704b000a4174a1e5a67811e2240a [ 2403.759821] env[62740]: DEBUG nova.scheduler.client.report [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2403.762017] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg a624659772fa46a187f8486f569818ed in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2403.775337] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a624659772fa46a187f8486f569818ed [ 2403.776016] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.259s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2403.776472] env[62740]: DEBUG nova.compute.manager [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: e597c7da-cb9e-413f-9c8e-e41ffbb31338] Start building networks asynchronously for instance. 
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2403.778309] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg a4930e6141f8460792f14db780ec16bf in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2403.811426] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a4930e6141f8460792f14db780ec16bf [ 2403.812652] env[62740]: DEBUG nova.compute.utils [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Using /dev/sd instead of None {{(pid=62740) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2403.813243] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 16eb879742b540baa3419df3a9eda878 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2403.814024] env[62740]: DEBUG nova.compute.manager [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: e597c7da-cb9e-413f-9c8e-e41ffbb31338] Allocating IP information in the background. {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2403.814194] env[62740]: DEBUG nova.network.neutron [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: e597c7da-cb9e-413f-9c8e-e41ffbb31338] allocate_for_instance() {{(pid=62740) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2403.831429] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 16eb879742b540baa3419df3a9eda878 [ 2403.831974] env[62740]: DEBUG nova.compute.manager [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: e597c7da-cb9e-413f-9c8e-e41ffbb31338] Start building block device mappings for instance. 
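"Start building networks asynchronously" above means port allocation is kicked off in the background while the compute manager moves straight on to block device mappings; Nova does this with eventlet greenthreads. The same shape with stdlib futures, using hypothetical stand-in functions (a sketch, not Nova's code):

    from concurrent.futures import ThreadPoolExecutor
    import time

    def allocate_for_instance(instance_uuid):
        time.sleep(0.5)      # stand-in for Neutron port creation
        return [{'port_id': '9819639e-a998-4a38-a48e-06a40eb6eae9'}]

    def build_block_device_mappings(instance_uuid):
        return [{'device_name': '/dev/sda', 'boot_index': 0}]

    uuid = 'e597c7da-cb9e-413f-9c8e-e41ffbb31338'
    with ThreadPoolExecutor(max_workers=1) as pool:
        nw_future = pool.submit(allocate_for_instance, uuid)
        bdms = build_block_device_mappings(uuid)   # proceeds immediately
        network_info = nw_future.result()          # block only when needed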
{{(pid=62740) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2403.833702] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 38dcfef25ce84b3f9e055f3e45cd9d3b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2403.863082] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 38dcfef25ce84b3f9e055f3e45cd9d3b [ 2403.866143] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg a3b489a9276941a5a8309772977324e6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2403.887952] env[62740]: DEBUG nova.policy [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd3f5aaf4abae42da9a5ad7044f84647d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1d3897fd0522431c87c8830678fd59ae', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62740) authorize /opt/stack/nova/nova/policy.py:203}} [ 2403.896751] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a3b489a9276941a5a8309772977324e6 [ 2403.897862] env[62740]: DEBUG nova.compute.manager [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: e597c7da-cb9e-413f-9c8e-e41ffbb31338] Start spawning the instance on the hypervisor. 
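The DEBUG "Policy check for network:attach_external_network failed" above is expected for a plain member-role tenant: the rule requires admin, so the port is simply created on a tenant network instead. A toy rule table showing the shape of such a check (the real rules live in Nova's policy configuration; this is illustrative only):

    # Hypothetical rule table: the external-network rule passes only for
    # admin credentials, mirroring the failed check in the log above.
    RULES = {'network:attach_external_network':
             lambda creds: creds.get('is_admin', False)}

    def authorize(rule, creds):
        allowed = RULES[rule](creds)
        if not allowed:
            print(f'Policy check for {rule} failed with credentials {creds}')
        return allowed

    creds = {'user_id': 'd3f5aaf4abae42da9a5ad7044f84647d',
             'roles': ['reader', 'member'], 'is_admin': False}
    authorize('network:attach_external_network', creds)   # -> False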
{{(pid=62740) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2403.921451] env[62740]: DEBUG nova.virt.hardware [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-04T08:25:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-04T08:25:17Z,direct_url=,disk_format='vmdk',id=174f7655-3fb8-458a-8e9c-108936afe738,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='47f0062f3bf04910bbbb3502a2f3ff28',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-04T08:25:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2403.921810] env[62740]: DEBUG nova.virt.hardware [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Flavor limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2403.921810] env[62740]: DEBUG nova.virt.hardware [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Image limits 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2403.922017] env[62740]: DEBUG nova.virt.hardware [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Flavor pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2403.922171] env[62740]: DEBUG nova.virt.hardware [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Image pref 0:0:0 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2403.922321] env[62740]: DEBUG nova.virt.hardware [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62740) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2403.922529] env[62740]: DEBUG nova.virt.hardware [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2403.922686] env[62740]: DEBUG nova.virt.hardware [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2403.923062] env[62740]: DEBUG nova.virt.hardware [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 
tempest-ServersTestJSON-1077887089-project-member] Got 1 possible topologies {{(pid=62740) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2403.923062] env[62740]: DEBUG nova.virt.hardware [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2403.923182] env[62740]: DEBUG nova.virt.hardware [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62740) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2403.924019] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5741cc56-15a4-4eef-a58d-a995e5e6165e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.933892] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-080803e0-90b9-4376-9f80-8f96b34a2648 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2404.223412] env[62740]: DEBUG nova.network.neutron [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: e597c7da-cb9e-413f-9c8e-e41ffbb31338] Successfully created port: 9819639e-a998-4a38-a48e-06a40eb6eae9 {{(pid=62740) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2404.808906] env[62740]: DEBUG nova.compute.manager [req-32124005-8bc8-4e7c-a41b-fb49836ffcd1 req-2309aeba-5116-421a-ab80-6b64b58756f6 service nova] [instance: e597c7da-cb9e-413f-9c8e-e41ffbb31338] Received event network-vif-plugged-9819639e-a998-4a38-a48e-06a40eb6eae9 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 2404.809223] env[62740]: DEBUG oslo_concurrency.lockutils [req-32124005-8bc8-4e7c-a41b-fb49836ffcd1 req-2309aeba-5116-421a-ab80-6b64b58756f6 service nova] Acquiring lock "e597c7da-cb9e-413f-9c8e-e41ffbb31338-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2404.809432] env[62740]: DEBUG oslo_concurrency.lockutils [req-32124005-8bc8-4e7c-a41b-fb49836ffcd1 req-2309aeba-5116-421a-ab80-6b64b58756f6 service nova] Lock "e597c7da-cb9e-413f-9c8e-e41ffbb31338-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2404.809601] env[62740]: DEBUG oslo_concurrency.lockutils [req-32124005-8bc8-4e7c-a41b-fb49836ffcd1 req-2309aeba-5116-421a-ab80-6b64b58756f6 service nova] Lock "e597c7da-cb9e-413f-9c8e-e41ffbb31338-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2404.809771] env[62740]: DEBUG nova.compute.manager [req-32124005-8bc8-4e7c-a41b-fb49836ffcd1 req-2309aeba-5116-421a-ab80-6b64b58756f6 service nova] [instance: e597c7da-cb9e-413f-9c8e-e41ffbb31338] 
No waiting events found dispatching network-vif-plugged-9819639e-a998-4a38-a48e-06a40eb6eae9 {{(pid=62740) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2404.809934] env[62740]: WARNING nova.compute.manager [req-32124005-8bc8-4e7c-a41b-fb49836ffcd1 req-2309aeba-5116-421a-ab80-6b64b58756f6 service nova] [instance: e597c7da-cb9e-413f-9c8e-e41ffbb31338] Received unexpected event network-vif-plugged-9819639e-a998-4a38-a48e-06a40eb6eae9 for instance with vm_state building and task_state spawning. [ 2404.895277] env[62740]: DEBUG nova.network.neutron [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: e597c7da-cb9e-413f-9c8e-e41ffbb31338] Successfully updated port: 9819639e-a998-4a38-a48e-06a40eb6eae9 {{(pid=62740) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2404.895779] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 1c15821ea10648c0b96bbf361a1aa3e1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2404.906650] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1c15821ea10648c0b96bbf361a1aa3e1 [ 2404.916521] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Acquiring lock "refresh_cache-e597c7da-cb9e-413f-9c8e-e41ffbb31338" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2404.916670] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Acquired lock "refresh_cache-e597c7da-cb9e-413f-9c8e-e41ffbb31338" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2404.916817] env[62740]: DEBUG nova.network.neutron [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: e597c7da-cb9e-413f-9c8e-e41ffbb31338] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2404.918715] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 026fd0622b53484da187d46eb9e27b96 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2404.926409] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 026fd0622b53484da187d46eb9e27b96 [ 2404.955910] env[62740]: DEBUG nova.network.neutron [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: e597c7da-cb9e-413f-9c8e-e41ffbb31338] Instance cache missing network info. 
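The WARNING "Received unexpected event network-vif-plugged-..." above is benign during build: Neutron delivered the vif-plugged event before the virt driver registered a waiter for it, so the pop finds nothing to dispatch. The lookup pattern, sketched with a plain dict (not Nova's actual implementation):

    # Events are dispatched only if a waiter was registered first;
    # otherwise the manager logs the "unexpected event" warning.
    _waiters = {}   # (instance_uuid, event_name) -> callback

    def register_waiter(instance, event, callback):
        _waiters[(instance, event)] = callback

    def pop_instance_event(instance, event):
        callback = _waiters.pop((instance, event), None)
        if callback is None:
            print(f'Received unexpected event {event} '
                  f'for instance {instance}')
            return
        callback()

    pop_instance_event(
        'e597c7da-cb9e-413f-9c8e-e41ffbb31338',
        'network-vif-plugged-9819639e-a998-4a38-a48e-06a40eb6eae9')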
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2405.112928] env[62740]: DEBUG nova.network.neutron [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: e597c7da-cb9e-413f-9c8e-e41ffbb31338] Updating instance_info_cache with network_info: [{"id": "9819639e-a998-4a38-a48e-06a40eb6eae9", "address": "fa:16:3e:41:dd:07", "network": {"id": "fe48b87d-d6bf-41e3-8587-388615fdb42f", "bridge": "br-int", "label": "tempest-ServersTestJSON-590567929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d3897fd0522431c87c8830678fd59ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9819639e-a9", "ovs_interfaceid": "9819639e-a998-4a38-a48e-06a40eb6eae9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2405.113490] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 7b92f38ac5a845a98c860cfede5dcf22 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2405.123163] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7b92f38ac5a845a98c860cfede5dcf22 [ 2405.123702] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Releasing lock "refresh_cache-e597c7da-cb9e-413f-9c8e-e41ffbb31338" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2405.123970] env[62740]: DEBUG nova.compute.manager [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: e597c7da-cb9e-413f-9c8e-e41ffbb31338] Instance network_info: |[{"id": "9819639e-a998-4a38-a48e-06a40eb6eae9", "address": "fa:16:3e:41:dd:07", "network": {"id": "fe48b87d-d6bf-41e3-8587-388615fdb42f", "bridge": "br-int", "label": "tempest-ServersTestJSON-590567929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d3897fd0522431c87c8830678fd59ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": 
"nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9819639e-a9", "ovs_interfaceid": "9819639e-a998-4a38-a48e-06a40eb6eae9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62740) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2405.124361] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: e597c7da-cb9e-413f-9c8e-e41ffbb31338] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:41:dd:07', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3952eb02-1162-48ed-8227-9c138960d583', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9819639e-a998-4a38-a48e-06a40eb6eae9', 'vif_model': 'vmxnet3'}] {{(pid=62740) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2405.132280] env[62740]: DEBUG oslo.service.loopingcall [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2405.132700] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e597c7da-cb9e-413f-9c8e-e41ffbb31338] Creating VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2405.132925] env[62740]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-153c3c2c-48c3-4e43-968e-07b3cec1fed4 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2405.153402] env[62740]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2405.153402] env[62740]: value = "task-640382" [ 2405.153402] env[62740]: _type = "Task" [ 2405.153402] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2405.160712] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640382, 'name': CreateVM_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2405.663571] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640382, 'name': CreateVM_Task} progress is 25%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2406.166844] env[62740]: DEBUG oslo_vmware.api [-] Task: {'id': task-640382, 'name': CreateVM_Task, 'duration_secs': 0.76121} completed successfully. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2406.166844] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e597c7da-cb9e-413f-9c8e-e41ffbb31338] Created VM on the ESX host {{(pid=62740) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2406.175741] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2406.175741] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2406.175741] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2406.175741] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-507ac29c-5ae4-407a-b411-d942d011d42d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2406.180016] env[62740]: DEBUG oslo_vmware.api [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Waiting for the task: (returnval){ [ 2406.180016] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5203b54b-ce27-b11b-3665-862da09d9e43" [ 2406.180016] env[62740]: _type = "Task" [ 2406.180016] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2406.185970] env[62740]: DEBUG oslo_vmware.api [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]5203b54b-ce27-b11b-3665-862da09d9e43, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2406.689928] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2406.689928] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: e597c7da-cb9e-413f-9c8e-e41ffbb31338] Processing image 174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2406.689928] env[62740]: DEBUG oslo_concurrency.lockutils [None req-c3cdd5b8-bd70-4677-a04e-a009760a2284 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2406.869578] env[62740]: DEBUG nova.compute.manager [req-07873389-d6d9-40df-a3a0-eb5e15aba78d req-5b561a88-c1f2-4bb8-8727-2deab94a62d9 service nova] [instance: e597c7da-cb9e-413f-9c8e-e41ffbb31338] Received event network-changed-9819639e-a998-4a38-a48e-06a40eb6eae9 {{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 2406.871087] env[62740]: DEBUG nova.compute.manager [req-07873389-d6d9-40df-a3a0-eb5e15aba78d req-5b561a88-c1f2-4bb8-8727-2deab94a62d9 service nova] [instance: e597c7da-cb9e-413f-9c8e-e41ffbb31338] Refreshing instance network info cache due to event network-changed-9819639e-a998-4a38-a48e-06a40eb6eae9. 
{{(pid=62740) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 2406.871087] env[62740]: DEBUG oslo_concurrency.lockutils [req-07873389-d6d9-40df-a3a0-eb5e15aba78d req-5b561a88-c1f2-4bb8-8727-2deab94a62d9 service nova] Acquiring lock "refresh_cache-e597c7da-cb9e-413f-9c8e-e41ffbb31338" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2406.871087] env[62740]: DEBUG oslo_concurrency.lockutils [req-07873389-d6d9-40df-a3a0-eb5e15aba78d req-5b561a88-c1f2-4bb8-8727-2deab94a62d9 service nova] Acquired lock "refresh_cache-e597c7da-cb9e-413f-9c8e-e41ffbb31338" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2406.871087] env[62740]: DEBUG nova.network.neutron [req-07873389-d6d9-40df-a3a0-eb5e15aba78d req-5b561a88-c1f2-4bb8-8727-2deab94a62d9 service nova] [instance: e597c7da-cb9e-413f-9c8e-e41ffbb31338] Refreshing network info cache for port 9819639e-a998-4a38-a48e-06a40eb6eae9 {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2406.871087] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-07873389-d6d9-40df-a3a0-eb5e15aba78d req-5b561a88-c1f2-4bb8-8727-2deab94a62d9 service nova] Expecting reply to msg 274a2e833cd646549889a3641812d33b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2406.884195] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 274a2e833cd646549889a3641812d33b [ 2407.326390] env[62740]: DEBUG nova.network.neutron [req-07873389-d6d9-40df-a3a0-eb5e15aba78d req-5b561a88-c1f2-4bb8-8727-2deab94a62d9 service nova] [instance: e597c7da-cb9e-413f-9c8e-e41ffbb31338] Updated VIF entry in instance network info cache for port 9819639e-a998-4a38-a48e-06a40eb6eae9. 
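"Updated VIF entry in instance network info cache" above is the refresh triggered by the network-changed event: the cached list of VIFs is scanned for the port id carried by the event, and that entry is replaced with the freshly built one. Sketched over plain dicts (illustrative only):

    def update_vif_entry(network_info, refreshed_vif):
        """Replace the cached VIF whose id matches the refreshed one."""
        for index, vif in enumerate(network_info):
            if vif['id'] == refreshed_vif['id']:
                network_info[index] = refreshed_vif
                return True
        return False   # port not in cache; caller would rebuild fully

    cache = [{'id': '9819639e-a998-4a38-a48e-06a40eb6eae9',
              'address': 'fa:16:3e:41:dd:07', 'active': False}]
    update_vif_entry(cache, {'id': '9819639e-a998-4a38-a48e-06a40eb6eae9',
                             'address': 'fa:16:3e:41:dd:07', 'active': True})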
{{(pid=62740) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2407.326390] env[62740]: DEBUG nova.network.neutron [req-07873389-d6d9-40df-a3a0-eb5e15aba78d req-5b561a88-c1f2-4bb8-8727-2deab94a62d9 service nova] [instance: e597c7da-cb9e-413f-9c8e-e41ffbb31338] Updating instance_info_cache with network_info: [{"id": "9819639e-a998-4a38-a48e-06a40eb6eae9", "address": "fa:16:3e:41:dd:07", "network": {"id": "fe48b87d-d6bf-41e3-8587-388615fdb42f", "bridge": "br-int", "label": "tempest-ServersTestJSON-590567929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d3897fd0522431c87c8830678fd59ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9819639e-a9", "ovs_interfaceid": "9819639e-a998-4a38-a48e-06a40eb6eae9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2407.326390] env[62740]: INFO oslo_messaging._drivers.amqpdriver [req-07873389-d6d9-40df-a3a0-eb5e15aba78d req-5b561a88-c1f2-4bb8-8727-2deab94a62d9 service nova] Expecting reply to msg 72487985e2c0426ea0476ce264f7473f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2407.334458] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 72487985e2c0426ea0476ce264f7473f [ 2407.335229] env[62740]: DEBUG oslo_concurrency.lockutils [req-07873389-d6d9-40df-a3a0-eb5e15aba78d req-5b561a88-c1f2-4bb8-8727-2deab94a62d9 service nova] Releasing lock "refresh_cache-e597c7da-cb9e-413f-9c8e-e41ffbb31338" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2407.924156] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2407.924156] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62740) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 2408.891613] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2410.891234] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2411.886199] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2411.889908] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2411.890107] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Starting heal instance info cache {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 2411.890218] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Rebuilding the list of instances to heal {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 2411.890854] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 074bc4eebacf4a969f5de4400d0ef8b8 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2411.906641] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 074bc4eebacf4a969f5de4400d0ef8b8 [ 2411.908375] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2411.908596] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2411.908759] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 0f438d9b-394a-465c-97ae-8393bdc3e1cd] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2411.908937] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 2ad0d938-304b-4535-8362-099c3a6864f6] Skipping network cache update for instance because it is Building. 
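The heal pass above (and the remaining "Skipping ... because it is Building" records just below) filters the instance list before doing any cache work: instances still building are excluded, and with every instance excluded the task ends with "Didn't find any instances for network info cache update." The filter, in miniature (illustrative):

    # All six instances on this host are still building, so the heal
    # pass has nothing to refresh.
    instances = [
        {'uuid': 'b75490e6-ded3-4aa7-89ff-f4963fe82cfe',
         'vm_state': 'building'},
        {'uuid': 'e597c7da-cb9e-413f-9c8e-e41ffbb31338',
         'vm_state': 'building'},
    ]
    to_heal = [i for i in instances if i['vm_state'] != 'building']
    if not to_heal:
        print("Didn't find any instances for network info cache update.")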
{{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2411.909267] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: a925b4b2-7320-4c28-b083-c15adf060a00] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2411.909417] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: e597c7da-cb9e-413f-9c8e-e41ffbb31338] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2411.909544] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Didn't find any instances for network info cache update. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 2411.910145] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager.update_available_resource {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2411.910477] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 66f5abe43f2c40a99bcb60cf46aacd38 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2411.921238] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 66f5abe43f2c40a99bcb60cf46aacd38 [ 2411.921771] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2411.921983] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2411.922167] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2411.922322] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62740) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2411.923385] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-352afa82-2250-4a3c-9258-c80674b7e583 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.932907] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a07afbc1-b914-4a13-b868-02b453646e5d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
2411.946335] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e1f87a6-54d4-43a3-afd7-8931ee2014ad {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.952253] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-685fd50c-9497-4309-9a8d-1e0a200850f0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.980920] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181695MB free_disk=90GB free_vcpus=48 pci_devices=None {{(pid=62740) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2411.981072] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2411.981254] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2411.982000] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 52f52f843ee44a86b4624f62b2e3d084 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2412.006636] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 52f52f843ee44a86b4624f62b2e3d084 [ 2412.009414] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 2afc478fefc5495d9f949ae398bc1b3e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2412.018434] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2afc478fefc5495d9f949ae398bc1b3e [ 2412.039014] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance b75490e6-ded3-4aa7-89ff-f4963fe82cfe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2412.039170] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2412.039303] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 0f438d9b-394a-465c-97ae-8393bdc3e1cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
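The "Final resource view" logged shortly below follows directly from summing the six tracked m1.nano instances (128 MB RAM, 1 GB disk, 1 vCPU each) and counting the host's 512 MB reserved memory as used: 6 x 128 + 512 = 1280 MB, 6 x 1 = 6 GB, 6 x 1 = 6 vCPUs. The same arithmetic as a check:

    instances = 6
    flavor = {'MEMORY_MB': 128, 'DISK_GB': 1, 'VCPU': 1}   # m1.nano
    reserved_ram_mb = 512         # reserved host memory counts as used

    used_ram = instances * flavor['MEMORY_MB'] + reserved_ram_mb
    used_disk = instances * flavor['DISK_GB']
    used_vcpus = instances * flavor['VCPU']
    print(used_ram, used_disk, used_vcpus)   # 1280 6 6, matching the log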
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2412.039424] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 2ad0d938-304b-4535-8362-099c3a6864f6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2412.039543] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance a925b4b2-7320-4c28-b083-c15adf060a00 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2412.039657] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance e597c7da-cb9e-413f-9c8e-e41ffbb31338 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2412.039830] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2412.040116] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2412.114320] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ae3e404-829f-40f4-ac32-50e1b216ffae {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.120172] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee0fe5d3-1af1-4ded-a089-ca3f6d6cadeb {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.149439] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53a74c20-6b31-437d-b25e-ad51a3db422a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.157374] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37f9d21c-0169-402d-856a-3a69aef542b1 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.170199] env[62740]: DEBUG nova.compute.provider_tree [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2412.170836] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 
cedcabfb1bed48c4842cef15b1e81c9d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2412.181739] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cedcabfb1bed48c4842cef15b1e81c9d [ 2412.185016] env[62740]: DEBUG nova.scheduler.client.report [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2412.185016] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 28892a34f476424ebe17a2ac294e5349 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2412.195996] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 28892a34f476424ebe17a2ac294e5349 [ 2412.196771] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62740) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2412.197157] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.216s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2414.178627] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2414.891239] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2415.891584] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2417.025618] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-9363c4ee-88d6-406d-a1fb-b7d329a378b7 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Expecting reply to msg bb4cdfd9b49b4df3860080f0a34ee3b1 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2417.038873] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bb4cdfd9b49b4df3860080f0a34ee3b1 [ 2417.038873] env[62740]: DEBUG oslo_concurrency.lockutils [None req-9363c4ee-88d6-406d-a1fb-b7d329a378b7 tempest-ServerAddressesNegativeTestJSON-371723933 tempest-ServerAddressesNegativeTestJSON-371723933-project-member] Acquiring lock "a925b4b2-7320-4c28-b083-c15adf060a00" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2417.201013] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._sync_power_states {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2417.201625] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg a21d493051c24003960f6bdecff651eb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2417.217470] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a21d493051c24003960f6bdecff651eb [ 2417.220421] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Getting list of instances from cluster (obj){ [ 2417.220421] env[62740]: value = "domain-c8" [ 2417.220421] env[62740]: _type = "ClusterComputeResource" [ 2417.220421] env[62740]: } {{(pid=62740) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2417.221731] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c266640-81fb-4788-9537-737c5aaf918c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2417.236556] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Got total of 6 instances {{(pid=62740) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2417.236726] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Triggering sync for uuid b75490e6-ded3-4aa7-89ff-f4963fe82cfe {{(pid=62740) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 2417.236947] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Triggering sync for uuid 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8 {{(pid=62740) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 2417.237134] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Triggering sync for uuid 0f438d9b-394a-465c-97ae-8393bdc3e1cd {{(pid=62740) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 2417.237290] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Triggering sync for uuid 2ad0d938-304b-4535-8362-099c3a6864f6 {{(pid=62740) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 2417.237441] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Triggering sync for uuid a925b4b2-7320-4c28-b083-c15adf060a00 {{(pid=62740) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 2417.237591] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Triggering sync for uuid e597c7da-cb9e-413f-9c8e-e41ffbb31338 {{(pid=62740) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 2417.237895] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "b75490e6-ded3-4aa7-89ff-f4963fe82cfe" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62740) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2417.238144] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2417.238353] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "0f438d9b-394a-465c-97ae-8393bdc3e1cd" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2417.238550] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "2ad0d938-304b-4535-8362-099c3a6864f6" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2417.238758] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "a925b4b2-7320-4c28-b083-c15adf060a00" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2417.238974] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "e597c7da-cb9e-413f-9c8e-e41ffbb31338" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2437.094995] env[62740]: WARNING oslo_vmware.rw_handles [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2437.094995] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2437.094995] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2437.094995] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2437.094995] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2437.094995] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 2437.094995] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2437.094995] env[62740]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2437.094995] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2437.094995] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2437.094995] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2437.094995] env[62740]: ERROR oslo_vmware.rw_handles [ 2437.095579] env[62740]: DEBUG nova.virt.vmwareapi.images [None 
req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/a8b2c5e5-5200-479e-8b36-bad2a4252679/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2437.100246] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2437.100246] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Copying Virtual Disk [datastore2] vmware_temp/a8b2c5e5-5200-479e-8b36-bad2a4252679/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore2] vmware_temp/a8b2c5e5-5200-479e-8b36-bad2a4252679/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2437.100246] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9a4abb16-3233-4223-9444-5b67fcff813a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2437.107020] env[62740]: DEBUG oslo_vmware.api [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Waiting for the task: (returnval){ [ 2437.107020] env[62740]: value = "task-640383" [ 2437.107020] env[62740]: _type = "Task" [ 2437.107020] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2437.114584] env[62740]: DEBUG oslo_vmware.api [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Task: {'id': task-640383, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2437.617606] env[62740]: DEBUG oslo_vmware.exceptions [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Fault InvalidArgument not matched. 
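"Fault InvalidArgument not matched" above means the fault name coming back from vCenter has no specific exception class registered, so it is translated to the generic VimFaultException that the spawn traceback below then raises. The shape of that translation, sketched (illustrative, not oslo.vmware's actual registry):

    class VimFaultException(Exception):
        """Generic fallback carrying the raw fault names."""
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    _FAULT_CLASSES = {}   # name -> exception class; empty in this sketch

    def translate_fault(fault_name, message):
        cls = _FAULT_CLASSES.get(fault_name)
        if cls is None:
            print(f'Fault {fault_name} not matched.')
            return VimFaultException([fault_name], message)
        return cls(message)

    exc = translate_fault('InvalidArgument',
                          'A specified parameter was not correct: fileType')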
{{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2437.617849] env[62740]: DEBUG oslo_concurrency.lockutils [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2437.618428] env[62740]: ERROR nova.compute.manager [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2437.618428] env[62740]: Faults: ['InvalidArgument'] [ 2437.618428] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Traceback (most recent call last): [ 2437.618428] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2437.618428] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] yield resources [ 2437.618428] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2437.618428] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] self.driver.spawn(context, instance, image_meta, [ 2437.618428] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2437.618428] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2437.618428] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2437.618428] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] self._fetch_image_if_missing(context, vi) [ 2437.618428] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2437.618428] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] image_cache(vi, tmp_image_ds_loc) [ 2437.618428] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2437.618428] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] vm_util.copy_virtual_disk( [ 2437.618428] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2437.618428] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] session._wait_for_task(vmdk_copy_task) [ 2437.618428] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 2437.618428] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] return self.wait_for_task(task_ref) [ 2437.618428] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2437.618428] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] return evt.wait() [ 2437.618428] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2437.618428] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] result = hub.switch() [ 2437.618428] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2437.618428] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] return self.greenlet.switch() [ 2437.618428] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2437.618428] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] self.f(*self.args, **self.kw) [ 2437.618428] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2437.618428] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] raise exceptions.translate_fault(task_info.error) [ 2437.618428] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2437.618428] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Faults: ['InvalidArgument'] [ 2437.618428] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] [ 2437.620062] env[62740]: INFO nova.compute.manager [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Terminating instance [ 2437.620352] env[62740]: DEBUG oslo_concurrency.lockutils [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2437.620562] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2437.620792] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e348cea6-231a-4082-bde9-13a543886229 {{(pid=62740) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2437.623765] env[62740]: DEBUG nova.compute.manager [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2437.623965] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2437.624679] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2f19707-b8c0-4c19-8d00-b5e1304d464a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2437.631448] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2437.631662] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6f05f295-5f4f-4ab0-b38f-fcd26737f116 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2437.633786] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2437.633956] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2437.634861] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51379590-0685-4336-a027-865ae41ed470 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2437.639383] env[62740]: DEBUG oslo_vmware.api [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Waiting for the task: (returnval){ [ 2437.639383] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52224d24-983a-af00-cbf4-36a6f197720c" [ 2437.639383] env[62740]: _type = "Task" [ 2437.639383] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2437.648795] env[62740]: DEBUG oslo_vmware.api [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Task: {'id': session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]52224d24-983a-af00-cbf4-36a6f197720c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2437.703342] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2437.703582] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2437.703748] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Deleting the datastore file [datastore2] b75490e6-ded3-4aa7-89ff-f4963fe82cfe {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2437.704019] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-88a129d6-7b89-4df4-b8e2-fd29b9965900 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2437.710293] env[62740]: DEBUG oslo_vmware.api [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Waiting for the task: (returnval){ [ 2437.710293] env[62740]: value = "task-640385" [ 2437.710293] env[62740]: _type = "Task" [ 2437.710293] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2437.717738] env[62740]: DEBUG oslo_vmware.api [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Task: {'id': task-640385, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2438.149275] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2438.149610] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Creating directory with path [datastore2] vmware_temp/412dfe59-51ff-4616-9910-c13c43585fbe/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2438.149763] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e71fa079-abe8-4313-a980-a0c835b2e51d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2438.161106] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Created directory with path [datastore2] vmware_temp/412dfe59-51ff-4616-9910-c13c43585fbe/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2438.161436] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Fetch image to [datastore2] vmware_temp/412dfe59-51ff-4616-9910-c13c43585fbe/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2438.161643] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/412dfe59-51ff-4616-9910-c13c43585fbe/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2438.162373] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9001a8d0-7f7c-4056-a0fa-4ea437f90686 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2438.169155] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5667e35-a7d4-4f92-9e31-702e9b616b6a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2438.178217] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04ee9682-ce54-4726-8d13-a04aaf6d65cd {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2438.209700] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78989546-9920-4e7f-b2d6-85059289d86d {{(pid=62740) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2438.220763] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6e32d458-f27d-4b58-ab59-affbcc13402a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2438.222497] env[62740]: DEBUG oslo_vmware.api [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Task: {'id': task-640385, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076277} completed successfully. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2438.222745] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2438.222931] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2438.223120] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2438.223301] env[62740]: INFO nova.compute.manager [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Took 0.60 seconds to destroy the instance on the hypervisor. 
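The wait_for_task / _poll_task entries above show oslo.vmware's task-polling pattern: a vSphere call such as DeleteDatastoreFile_Task returns a task reference, and the client polls its state until the task succeeds or faults. A minimal sketch of that loop in plain Python; get_task_info and the 'state'/'progress'/'error' keys are illustrative stand-ins, not the oslo.vmware API:

    import time

    POLL_INTERVAL = 0.5  # seconds between polls; illustrative value

    class TaskFailed(Exception):
        """Raised when the polled task ends in an error state."""

    def wait_for_task(get_task_info, task_id, timeout=300.0):
        # Poll until the task reaches a terminal state, mirroring the
        # "progress is 0%" ... "completed successfully" sequence in the log.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info(task_id)
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                raise TaskFailed(info.get('error') or 'unknown fault')
            time.sleep(POLL_INTERVAL)
        raise TimeoutError('task %s did not complete within %.0fs' % (task_id, timeout))

The real loop in oslo_vmware/api.py is driven by an eventlet looping call rather than time.sleep, which is why the failure tracebacks in this log pass through loopingcall.py and hub.switch() before translate_fault raises.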
[ 2438.225523] env[62740]: DEBUG nova.compute.claims [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2438.225713] env[62740]: DEBUG oslo_concurrency.lockutils [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2438.225934] env[62740]: DEBUG oslo_concurrency.lockutils [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2438.227940] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 76aa256b83d3439eaa27aa7a5dc79407 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2438.251730] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2438.262735] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 76aa256b83d3439eaa27aa7a5dc79407 [ 2438.310644] env[62740]: DEBUG oslo_vmware.rw_handles [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/412dfe59-51ff-4616-9910-c13c43585fbe/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2438.374601] env[62740]: DEBUG oslo_vmware.rw_handles [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Completed reading data from the image iterator. {{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2438.374813] env[62740]: DEBUG oslo_vmware.rw_handles [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/412dfe59-51ff-4616-9910-c13c43585fbe/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2438.418440] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1c21aaf-2cdd-4dee-b44f-d483c43e44a9 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2438.425981] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e86c5c32-c6f2-44ed-835f-1e1c141b565b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2438.455681] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2491611a-43da-49e3-88f8-7b8f4e43ff7f {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2438.462179] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a1e79d8-d9e6-4f81-96e0-731cec0f1e4a {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2438.474795] env[62740]: DEBUG nova.compute.provider_tree [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2438.475300] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg b45ee5a418094d6cb6f75feef763be85 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2438.483162] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b45ee5a418094d6cb6f75feef763be85 [ 2438.484015] env[62740]: DEBUG nova.scheduler.client.report [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2438.486218] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 75d33221f1da4f529a375e867af0b4cb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2438.497215] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 75d33221f1da4f529a375e867af0b4cb [ 2438.497872] env[62740]: DEBUG oslo_concurrency.lockutils [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.272s {{(pid=62740) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2438.498404] env[62740]: ERROR nova.compute.manager [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2438.498404] env[62740]: Faults: ['InvalidArgument'] [ 2438.498404] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Traceback (most recent call last): [ 2438.498404] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2438.498404] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] self.driver.spawn(context, instance, image_meta, [ 2438.498404] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2438.498404] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2438.498404] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2438.498404] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] self._fetch_image_if_missing(context, vi) [ 2438.498404] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2438.498404] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] image_cache(vi, tmp_image_ds_loc) [ 2438.498404] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2438.498404] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] vm_util.copy_virtual_disk( [ 2438.498404] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2438.498404] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] session._wait_for_task(vmdk_copy_task) [ 2438.498404] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2438.498404] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] return self.wait_for_task(task_ref) [ 2438.498404] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2438.498404] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] return evt.wait() [ 2438.498404] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2438.498404] env[62740]: ERROR nova.compute.manager [instance: 
b75490e6-ded3-4aa7-89ff-f4963fe82cfe] result = hub.switch() [ 2438.498404] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2438.498404] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] return self.greenlet.switch() [ 2438.498404] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2438.498404] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] self.f(*self.args, **self.kw) [ 2438.498404] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2438.498404] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] raise exceptions.translate_fault(task_info.error) [ 2438.498404] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2438.498404] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Faults: ['InvalidArgument'] [ 2438.498404] env[62740]: ERROR nova.compute.manager [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] [ 2438.499242] env[62740]: DEBUG nova.compute.utils [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2438.500489] env[62740]: DEBUG nova.compute.manager [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Build of instance b75490e6-ded3-4aa7-89ff-f4963fe82cfe was re-scheduled: A specified parameter was not correct: fileType [ 2438.500489] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2438.500874] env[62740]: DEBUG nova.compute.manager [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2438.501061] env[62740]: DEBUG nova.compute.manager [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2438.501241] env[62740]: DEBUG nova.compute.manager [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2438.501405] env[62740]: DEBUG nova.network.neutron [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2438.753774] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 3120d1e03dfc4c77b5559714d44203bb in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2438.770018] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3120d1e03dfc4c77b5559714d44203bb [ 2438.770018] env[62740]: DEBUG nova.network.neutron [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2438.770018] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 72bd5f65c6a149638800dddf32b5165a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2438.783479] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 72bd5f65c6a149638800dddf32b5165a [ 2438.784134] env[62740]: INFO nova.compute.manager [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Took 0.28 seconds to deallocate network for instance. 
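The Acquiring lock / acquired / "released" records that follow come from oslo.concurrency's lockutils, which wraps each critical section in a named lock and logs the wait and hold times. A minimal sketch of the same pattern; the lock name is the instance UUID taken from this log, and the function body is a placeholder:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('b75490e6-ded3-4aa7-89ff-f4963fe82cfe')
    def do_terminate_instance():
        # Runs with the per-instance lock held; lockutils emits the
        # acquire/release DEBUG lines (with waited/held durations)
        # seen throughout this log.
        ...

Because build_and_run_instance and terminate_instance serialize on the same per-instance lock, the terminate request below waits 232.793s for the failed build (which held the lock for 428.619s) to release it.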
[ 2438.786537] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 6621bb46408340bb89812dd298714c53 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2438.825341] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6621bb46408340bb89812dd298714c53 [ 2438.827971] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg b1278a55e4de4d2aaf33b4b8048f8c8a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2438.861014] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b1278a55e4de4d2aaf33b4b8048f8c8a [ 2438.893806] env[62740]: INFO nova.scheduler.client.report [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Deleted allocations for instance b75490e6-ded3-4aa7-89ff-f4963fe82cfe [ 2438.901995] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 88c4bcfdf870499599eef0ae6038c045 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2438.916413] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 88c4bcfdf870499599eef0ae6038c045 [ 2438.916982] env[62740]: DEBUG oslo_concurrency.lockutils [None req-45a45e6f-8bf7-420b-a9a0-1b083974b7bd tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Lock "b75490e6-ded3-4aa7-89ff-f4963fe82cfe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 428.619s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2438.917250] env[62740]: DEBUG oslo_concurrency.lockutils [None req-24552417-118e-4acb-93eb-4a2b9765a445 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Lock "b75490e6-ded3-4aa7-89ff-f4963fe82cfe" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 232.793s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2438.917471] env[62740]: DEBUG oslo_concurrency.lockutils [None req-24552417-118e-4acb-93eb-4a2b9765a445 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Acquiring lock "b75490e6-ded3-4aa7-89ff-f4963fe82cfe-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2438.917677] env[62740]: DEBUG oslo_concurrency.lockutils [None req-24552417-118e-4acb-93eb-4a2b9765a445 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Lock "b75490e6-ded3-4aa7-89ff-f4963fe82cfe-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2438.917840] env[62740]: DEBUG oslo_concurrency.lockutils [None req-24552417-118e-4acb-93eb-4a2b9765a445 tempest-ServersTestJSON-1077887089
tempest-ServersTestJSON-1077887089-project-member] Lock "b75490e6-ded3-4aa7-89ff-f4963fe82cfe-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2438.922561] env[62740]: INFO nova.compute.manager [None req-24552417-118e-4acb-93eb-4a2b9765a445 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Terminating instance [ 2438.925350] env[62740]: DEBUG nova.compute.manager [None req-24552417-118e-4acb-93eb-4a2b9765a445 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2438.925576] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-24552417-118e-4acb-93eb-4a2b9765a445 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2438.926076] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c30385d4-bb79-4897-b267-cf4fe04538c4 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2438.936964] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ffec2f5-fa88-4bae-bf17-053851f25095 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2438.962916] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-24552417-118e-4acb-93eb-4a2b9765a445 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b75490e6-ded3-4aa7-89ff-f4963fe82cfe could not be found. [ 2438.963201] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-24552417-118e-4acb-93eb-4a2b9765a445 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2438.963434] env[62740]: INFO nova.compute.manager [None req-24552417-118e-4acb-93eb-4a2b9765a445 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2438.963750] env[62740]: DEBUG oslo.service.loopingcall [None req-24552417-118e-4acb-93eb-4a2b9765a445 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2438.963977] env[62740]: DEBUG nova.compute.manager [-] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2438.964122] env[62740]: DEBUG nova.network.neutron [-] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2438.982059] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 385582418aee4c0b81a75362f18e084f in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2438.987731] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 385582418aee4c0b81a75362f18e084f [ 2438.988243] env[62740]: DEBUG nova.network.neutron [-] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2438.988672] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg f611977c82664eaeaf215fa4b98d86a7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2438.996216] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f611977c82664eaeaf215fa4b98d86a7 [ 2438.996710] env[62740]: INFO nova.compute.manager [-] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] Took 0.03 seconds to deallocate network for instance. [ 2439.000518] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-24552417-118e-4acb-93eb-4a2b9765a445 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 5a5a63c1d5174d34a213d34bf57183d7 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2439.027333] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5a5a63c1d5174d34a213d34bf57183d7 [ 2439.044615] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-24552417-118e-4acb-93eb-4a2b9765a445 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 6f59d6dc1d454a53baed3e443104c718 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2439.084154] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6f59d6dc1d454a53baed3e443104c718 [ 2439.087562] env[62740]: DEBUG oslo_concurrency.lockutils [None req-24552417-118e-4acb-93eb-4a2b9765a445 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Lock "b75490e6-ded3-4aa7-89ff-f4963fe82cfe" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.170s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2439.087934] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-24552417-118e-4acb-93eb-4a2b9765a445 tempest-ServersTestJSON-1077887089 tempest-ServersTestJSON-1077887089-project-member] Expecting reply to msg 262a11a878b24dc4a07d4ff80ab424a5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2439.088654] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "b75490e6-ded3-4aa7-89ff-f4963fe82cfe" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 21.851s {{(pid=62740) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2439.089257] env[62740]: INFO nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: b75490e6-ded3-4aa7-89ff-f4963fe82cfe] During sync_power_state the instance has a pending task (deleting). Skip. [ 2439.089487] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "b75490e6-ded3-4aa7-89ff-f4963fe82cfe" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2439.098177] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 262a11a878b24dc4a07d4ff80ab424a5 [ 2450.209242] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 764266bb17d74d9388f58b94fe9e9609 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2450.218365] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 764266bb17d74d9388f58b94fe9e9609 [ 2469.890911] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2469.890911] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2469.891336] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] CONF.reclaim_instance_interval <= 0, skipping...
{{(pid=62740) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 2470.891983] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2472.886672] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2472.890309] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager.update_available_resource {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2472.890704] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg f669e8150b9143bfa8a8effbc779b4f3 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2472.903015] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f669e8150b9143bfa8a8effbc779b4f3 [ 2472.904166] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2472.904398] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2472.904571] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2472.904729] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62740) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2472.905818] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abd74592-3fd1-48ac-a2a6-72068ccd9ea7 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2472.914671] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f686c52-84b5-46b6-8870-cf9fb5b7f583 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2472.928159] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94699d76-1189-4e30-bf8b-7568d0496cd3 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2472.934123] 
env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f62f2fb-9c92-4ec9-ac18-d01a1b3d827d {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2472.962298] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181687MB free_disk=90GB free_vcpus=48 pci_devices=None {{(pid=62740) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2472.962449] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2472.962621] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2472.963440] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 03b2d7cdb1c841f2b967dac0ed8ea2f6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2472.986679] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 03b2d7cdb1c841f2b967dac0ed8ea2f6 [ 2472.989284] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg ca49956726884435a0a9cb1c3ed4d265 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2472.998826] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ca49956726884435a0a9cb1c3ed4d265 [ 2473.022228] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2473.022228] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 0f438d9b-394a-465c-97ae-8393bdc3e1cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2473.022228] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance 2ad0d938-304b-4535-8362-099c3a6864f6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2473.022228] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance a925b4b2-7320-4c28-b083-c15adf060a00 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2473.022228] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Instance e597c7da-cb9e-413f-9c8e-e41ffbb31338 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62740) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2473.022228] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2473.022228] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1152MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=62740) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2473.085522] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdddf91e-3e23-4a27-93f7-d5024f7d98de {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2473.093931] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eedfa94-fd12-4be3-82fb-3700b674d44e {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2473.122195] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70791d43-4bb3-4f2c-a3e9-ab42ce500365 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2473.128627] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f12dd385-0f5e-4c78-821e-a467a3835db2 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2473.141239] env[62740]: DEBUG nova.compute.provider_tree [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2473.141637] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 07d3b1eeec144b929635dc6abced9c43 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2473.148281] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07d3b1eeec144b929635dc6abced9c43 [ 2473.149114] env[62740]: DEBUG nova.scheduler.client.report [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory 
data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2473.151283] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 5f73421a72c2449288edb8199a254013 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2473.161915] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5f73421a72c2449288edb8199a254013 [ 2473.162507] env[62740]: DEBUG nova.compute.resource_tracker [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62740) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2473.162686] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.200s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2474.163260] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2474.163546] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Starting heal instance info cache {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 2474.163600] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Rebuilding the list of instances to heal {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 2474.164187] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg 30ea051347c14482930ffa41aba674ea in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2474.176672] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30ea051347c14482930ffa41aba674ea [ 2474.178189] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2474.178341] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 0f438d9b-394a-465c-97ae-8393bdc3e1cd] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2474.178472] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 2ad0d938-304b-4535-8362-099c3a6864f6] Skipping network cache update for instance because it is Building. 
{{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2474.178601] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: a925b4b2-7320-4c28-b083-c15adf060a00] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2474.178727] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: e597c7da-cb9e-413f-9c8e-e41ffbb31338] Skipping network cache update for instance because it is Building. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2474.178851] env[62740]: DEBUG nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Didn't find any instances for network info cache update. {{(pid=62740) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 2474.901957] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2474.902667] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Expecting reply to msg c4b874d0a59f43f9af5b33fa0fa43116 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2474.915640] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c4b874d0a59f43f9af5b33fa0fa43116 [ 2475.891238] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2475.891533] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2476.891613] env[62740]: DEBUG oslo_service.periodic_task [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62740) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2484.268661] env[62740]: WARNING oslo_vmware.rw_handles [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2484.268661] env[62740]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2484.268661] env[62740]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2484.268661] env[62740]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2484.268661] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2484.268661] env[62740]: ERROR oslo_vmware.rw_handles response.begin() [ 2484.268661] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2484.268661] env[62740]: ERROR 
oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2484.268661] env[62740]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2484.268661] env[62740]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2484.268661] env[62740]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2484.268661] env[62740]: ERROR oslo_vmware.rw_handles [ 2484.269305] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Downloaded image file data 174f7655-3fb8-458a-8e9c-108936afe738 to vmware_temp/412dfe59-51ff-4616-9910-c13c43585fbe/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2484.271063] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Caching image {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2484.271341] env[62740]: DEBUG nova.virt.vmwareapi.vm_util [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Copying Virtual Disk [datastore2] vmware_temp/412dfe59-51ff-4616-9910-c13c43585fbe/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk to [datastore2] vmware_temp/412dfe59-51ff-4616-9910-c13c43585fbe/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk {{(pid=62740) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2484.271633] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-77aa2f01-5b4f-4be9-8fa0-ae49eadddc46 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2484.280166] env[62740]: DEBUG oslo_vmware.api [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Waiting for the task: (returnval){ [ 2484.280166] env[62740]: value = "task-640386" [ 2484.280166] env[62740]: _type = "Task" [ 2484.280166] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2484.287830] env[62740]: DEBUG oslo_vmware.api [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Task: {'id': task-640386, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2484.797710] env[62740]: DEBUG oslo_vmware.exceptions [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Fault InvalidArgument not matched. 
{{(pid=62740) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2484.800352] env[62740]: DEBUG oslo_concurrency.lockutils [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Releasing lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2484.800933] env[62740]: ERROR nova.compute.manager [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2484.800933] env[62740]: Faults: ['InvalidArgument'] [ 2484.800933] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Traceback (most recent call last): [ 2484.800933] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2484.800933] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] yield resources [ 2484.800933] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2484.800933] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] self.driver.spawn(context, instance, image_meta, [ 2484.800933] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2484.800933] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2484.800933] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2484.800933] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] self._fetch_image_if_missing(context, vi) [ 2484.800933] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2484.800933] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] image_cache(vi, tmp_image_ds_loc) [ 2484.800933] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2484.800933] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] vm_util.copy_virtual_disk( [ 2484.800933] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2484.800933] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] session._wait_for_task(vmdk_copy_task) [ 2484.800933] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 
157, in _wait_for_task [ 2484.800933] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] return self.wait_for_task(task_ref) [ 2484.800933] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2484.800933] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] return evt.wait() [ 2484.800933] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2484.800933] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] result = hub.switch() [ 2484.800933] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2484.800933] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] return self.greenlet.switch() [ 2484.800933] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2484.800933] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] self.f(*self.args, **self.kw) [ 2484.800933] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2484.800933] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] raise exceptions.translate_fault(task_info.error) [ 2484.800933] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2484.800933] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Faults: ['InvalidArgument'] [ 2484.800933] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] [ 2484.802340] env[62740]: INFO nova.compute.manager [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Terminating instance [ 2484.802850] env[62740]: DEBUG oslo_concurrency.lockutils [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Acquired lock "[datastore2] devstack-image-cache_base/174f7655-3fb8-458a-8e9c-108936afe738/174f7655-3fb8-458a-8e9c-108936afe738.vmdk" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2484.803831] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2484.804376] env[62740]: DEBUG oslo_concurrency.lockutils [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Acquiring lock 
"refresh_cache-22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2484.804541] env[62740]: DEBUG oslo_concurrency.lockutils [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Acquired lock "refresh_cache-22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2484.804713] env[62740]: DEBUG nova.network.neutron [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2484.805194] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Expecting reply to msg 42627e9217b443c199efd99a3f787d90 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2484.805891] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-96a19c78-c487-4803-ab7f-ca5351b200f6 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2484.814668] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2484.814859] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62740) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2484.815574] env[62740]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e0421bb-ee5a-4d4a-b02f-8f2c22737650 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2484.818735] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 42627e9217b443c199efd99a3f787d90 [ 2484.826581] env[62740]: DEBUG oslo_vmware.api [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Waiting for the task: (returnval){ [ 2484.826581] env[62740]: value = "session[527adbf7-6c5f-c6f1-cfdc-adeae2ff7ab1]527e24a7-1312-1b91-4d0f-8be121a8d666" [ 2484.826581] env[62740]: _type = "Task" [ 2484.826581] env[62740]: } to complete. 
{{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2484.838028] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 0f438d9b-394a-465c-97ae-8393bdc3e1cd] Preparing fetch location {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2484.838298] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Creating directory with path [datastore2] vmware_temp/32b69907-d2b7-4b07-bae1-7084a5653786/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2484.838586] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6c542b8e-8e1a-4260-84f1-95a482dfc49b {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2484.858302] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Created directory with path [datastore2] vmware_temp/32b69907-d2b7-4b07-bae1-7084a5653786/174f7655-3fb8-458a-8e9c-108936afe738 {{(pid=62740) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2484.858567] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 0f438d9b-394a-465c-97ae-8393bdc3e1cd] Fetch image to [datastore2] vmware_temp/32b69907-d2b7-4b07-bae1-7084a5653786/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk {{(pid=62740) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2484.858740] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 0f438d9b-394a-465c-97ae-8393bdc3e1cd] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to [datastore2] vmware_temp/32b69907-d2b7-4b07-bae1-7084a5653786/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk on the data store datastore2 {{(pid=62740) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2484.859541] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cef5e68-70f0-4168-8feb-547b2789705c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2484.866433] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f13e5f12-eaff-43c8-a1d8-2f010be60307 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2484.876103] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0912c417-4702-4d06-a14e-4d57bc6b32aa {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2484.906601] env[62740]: DEBUG nova.network.neutron [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 
tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2484.908827] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b61228d5-e38d-4ba0-91ec-7e6fce9f2e57 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2484.915017] env[62740]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c0717317-c72c-4cbd-b7ef-28cf6385c3cd {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2484.935798] env[62740]: DEBUG nova.virt.vmwareapi.images [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] [instance: 0f438d9b-394a-465c-97ae-8393bdc3e1cd] Downloading image file data 174f7655-3fb8-458a-8e9c-108936afe738 to the data store datastore2 {{(pid=62740) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2484.979479] env[62740]: DEBUG nova.network.neutron [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2484.980012] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Expecting reply to msg 259e3fd59cfd42ea8ddcc3932815d33a in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2484.993514] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 259e3fd59cfd42ea8ddcc3932815d33a [ 2484.994611] env[62740]: DEBUG oslo_vmware.rw_handles [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/32b69907-d2b7-4b07-bae1-7084a5653786/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62740) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2484.995991] env[62740]: DEBUG oslo_concurrency.lockutils [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Releasing lock "refresh_cache-22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2484.996401] env[62740]: DEBUG nova.compute.manager [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Start destroying the instance on the hypervisor. 
{{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2484.996596] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2484.997856] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e83e5dd-4964-454d-af0e-1718e840db0c {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2485.058516] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Unregistering the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2485.059728] env[62740]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8759e40e-ca5a-4f23-9672-a43d7a864139 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2485.061528] env[62740]: DEBUG oslo_vmware.rw_handles [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Completed reading data from the image iterator. {{(pid=62740) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2485.061699] env[62740]: DEBUG oslo_vmware.rw_handles [None req-ac2b75fa-0fe0-4ed7-9be6-55beb9ff340c tempest-DeleteServersTestJSON-1378904294 tempest-DeleteServersTestJSON-1378904294-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/32b69907-d2b7-4b07-bae1-7084a5653786/174f7655-3fb8-458a-8e9c-108936afe738/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62740) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2485.087689] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Unregistered the VM {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2485.087889] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Deleting contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2485.088082] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Deleting the datastore file [datastore2] 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8 {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2485.088352] env[62740]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-be79d004-55a6-4e5c-9039-9fd5cb7616a0 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2485.093758] env[62740]: DEBUG oslo_vmware.api [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Waiting for the task: (returnval){ [ 2485.093758] env[62740]: value = "task-640388" [ 2485.093758] env[62740]: _type = "Task" [ 2485.093758] env[62740]: } to complete. {{(pid=62740) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2485.102085] env[62740]: DEBUG oslo_vmware.api [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Task: {'id': task-640388, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2485.604746] env[62740]: DEBUG oslo_vmware.api [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Task: {'id': task-640388, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.033484} completed successfully. 
{{(pid=62740) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2485.604746] env[62740]: DEBUG nova.virt.vmwareapi.ds_util [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Deleted the datastore file {{(pid=62740) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2485.604746] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Deleted contents of the VM from datastore datastore2 {{(pid=62740) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2485.604746] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2485.604746] env[62740]: INFO nova.compute.manager [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Took 0.61 seconds to destroy the instance on the hypervisor. [ 2485.604746] env[62740]: DEBUG oslo.service.loopingcall [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2485.605224] env[62740]: DEBUG nova.compute.manager [-] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Skipping network deallocation for instance since networking was not requested.
{{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 2485.607493] env[62740]: DEBUG nova.compute.claims [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Aborting claim: {{(pid=62740) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2485.607493] env[62740]: DEBUG oslo_concurrency.lockutils [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2485.607614] env[62740]: DEBUG oslo_concurrency.lockutils [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2485.609797] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Expecting reply to msg b67b605d737b45b3a5a015459fc9b1e5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2485.653142] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b67b605d737b45b3a5a015459fc9b1e5 [ 2485.730191] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5481bdf4-6ec9-473b-ad29-d2cb16c80045 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2485.737915] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df1d06ea-beef-4863-b441-7838dcdc0994 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2485.768439] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a8145a5-6318-452c-a1ee-62dc5e476675 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2485.775234] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a568398-3ffa-4b6c-baf2-a717649bd327 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2485.788778] env[62740]: DEBUG nova.compute.provider_tree [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Inventory has not changed in ProviderTree for provider: d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 {{(pid=62740) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2485.788778] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Expecting reply to msg 0371fd7291b14d9b9d4b2e6805d276ec in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2485.795959] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC 
response for msg 0371fd7291b14d9b9d4b2e6805d276ec [ 2485.796804] env[62740]: DEBUG nova.scheduler.client.report [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Inventory has not changed for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62740) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2485.799022] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Expecting reply to msg 59cdc917c0644ada8cdeba33f8194d6b in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2485.809764] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 59cdc917c0644ada8cdeba33f8194d6b [ 2485.810522] env[62740]: DEBUG oslo_concurrency.lockutils [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.203s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2485.811025] env[62740]: ERROR nova.compute.manager [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2485.811025] env[62740]: Faults: ['InvalidArgument'] [ 2485.811025] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Traceback (most recent call last): [ 2485.811025] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2485.811025] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] self.driver.spawn(context, instance, image_meta, [ 2485.811025] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2485.811025] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2485.811025] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2485.811025] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] self._fetch_image_if_missing(context, vi) [ 2485.811025] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2485.811025] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] image_cache(vi, 
tmp_image_ds_loc) [ 2485.811025] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2485.811025] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] vm_util.copy_virtual_disk( [ 2485.811025] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2485.811025] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] session._wait_for_task(vmdk_copy_task) [ 2485.811025] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2485.811025] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] return self.wait_for_task(task_ref) [ 2485.811025] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2485.811025] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] return evt.wait() [ 2485.811025] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2485.811025] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] result = hub.switch() [ 2485.811025] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2485.811025] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] return self.greenlet.switch() [ 2485.811025] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2485.811025] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] self.f(*self.args, **self.kw) [ 2485.811025] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2485.811025] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] raise exceptions.translate_fault(task_info.error) [ 2485.811025] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2485.811025] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Faults: ['InvalidArgument'] [ 2485.811025] env[62740]: ERROR nova.compute.manager [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] [ 2485.811830] env[62740]: DEBUG nova.compute.utils [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] VimFaultException {{(pid=62740) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2485.813295] env[62740]: DEBUG nova.compute.manager [None req-331e0194-dcd9-4434-894e-9114edb9e588 
tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Build of instance 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8 was re-scheduled: A specified parameter was not correct: fileType [ 2485.813295] env[62740]: Faults: ['InvalidArgument'] {{(pid=62740) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2485.813687] env[62740]: DEBUG nova.compute.manager [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Unplugging VIFs for instance {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2485.813905] env[62740]: DEBUG oslo_concurrency.lockutils [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Acquiring lock "refresh_cache-22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2485.814064] env[62740]: DEBUG oslo_concurrency.lockutils [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Acquired lock "refresh_cache-22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2485.814233] env[62740]: DEBUG nova.network.neutron [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2485.814615] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Expecting reply to msg bfd7b949e3634da29a6052cf73433e1e in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2485.820471] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bfd7b949e3634da29a6052cf73433e1e [ 2485.837767] env[62740]: DEBUG nova.network.neutron [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Instance cache missing network info. 
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2485.901201] env[62740]: DEBUG nova.network.neutron [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2485.901765] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Expecting reply to msg 02084acc5ce546a09899f4d53727ee90 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2485.909325] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 02084acc5ce546a09899f4d53727ee90 [ 2485.909918] env[62740]: DEBUG oslo_concurrency.lockutils [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Releasing lock "refresh_cache-22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2485.910160] env[62740]: DEBUG nova.compute.manager [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62740) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2485.910345] env[62740]: DEBUG nova.compute.manager [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Skipping network deallocation for instance since networking was not requested. 
{{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 2485.911944] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Expecting reply to msg 04d5c2e5a70245d09a45af467ff91d50 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2485.952443] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 04d5c2e5a70245d09a45af467ff91d50 [ 2485.958016] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Expecting reply to msg 8d7f7c7b1bb34fd9b3a6c2e7ed1640c2 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2485.984710] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8d7f7c7b1bb34fd9b3a6c2e7ed1640c2 [ 2486.010114] env[62740]: INFO nova.scheduler.client.report [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Deleted allocations for instance 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8 [ 2486.015665] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Expecting reply to msg 8ee7fa479eb64ee1980a8eee6e741817 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2486.028576] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8ee7fa479eb64ee1980a8eee6e741817 [ 2486.029094] env[62740]: DEBUG oslo_concurrency.lockutils [None req-331e0194-dcd9-4434-894e-9114edb9e588 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Lock "22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 422.802s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2486.029332] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8be0f0e5-8c87-496f-8823-492d2347f615 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Lock "22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 227.324s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2486.029553] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8be0f0e5-8c87-496f-8823-492d2347f615 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Acquiring lock "22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2486.029789] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8be0f0e5-8c87-496f-8823-492d2347f615 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Lock "22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2486.029941]
env[62740]: DEBUG oslo_concurrency.lockutils [None req-8be0f0e5-8c87-496f-8823-492d2347f615 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Lock "22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2486.031778] env[62740]: INFO nova.compute.manager [None req-8be0f0e5-8c87-496f-8823-492d2347f615 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Terminating instance [ 2486.033373] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8be0f0e5-8c87-496f-8823-492d2347f615 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Acquiring lock "refresh_cache-22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2486.033532] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8be0f0e5-8c87-496f-8823-492d2347f615 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Acquired lock "refresh_cache-22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2486.033697] env[62740]: DEBUG nova.network.neutron [None req-8be0f0e5-8c87-496f-8823-492d2347f615 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Building network info cache for instance {{(pid=62740) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2486.034109] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8be0f0e5-8c87-496f-8823-492d2347f615 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Expecting reply to msg 25da1d38c1cf4efbae45a50af0db40b5 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2486.042965] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 25da1d38c1cf4efbae45a50af0db40b5 [ 2486.061617] env[62740]: DEBUG nova.network.neutron [None req-8be0f0e5-8c87-496f-8823-492d2347f615 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Instance cache missing network info.
{{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2486.121817] env[62740]: DEBUG nova.network.neutron [None req-8be0f0e5-8c87-496f-8823-492d2347f615 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2486.122390] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8be0f0e5-8c87-496f-8823-492d2347f615 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Expecting reply to msg eb333b04d54b447e9953372699147208 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2486.136593] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eb333b04d54b447e9953372699147208 [ 2486.137413] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8be0f0e5-8c87-496f-8823-492d2347f615 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Releasing lock "refresh_cache-22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8" {{(pid=62740) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2486.137918] env[62740]: DEBUG nova.compute.manager [None req-8be0f0e5-8c87-496f-8823-492d2347f615 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Start destroying the instance on the hypervisor. {{(pid=62740) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2486.138245] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-8be0f0e5-8c87-496f-8823-492d2347f615 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Destroying instance {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2486.138913] env[62740]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-771d7781-46ed-4af2-a6d5-0067dc69b261 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2486.148016] env[62740]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63004ebc-afd0-4feb-8b82-e03c92dd7413 {{(pid=62740) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2486.172992] env[62740]: WARNING nova.virt.vmwareapi.vmops [None req-8be0f0e5-8c87-496f-8823-492d2347f615 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8 could not be found. 
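The InstanceNotFound warning above is the benign half of a race: the failed-spawn cleanup under req-331e0194 already unregistered the VM and deleted its datastore files, so when the user-initiated terminate under req-8be0f0e5 reaches the backend there is nothing left to remove, and the destroy is treated as already complete. A minimal sketch of that idempotent-destroy control flow, using a hypothetical in-memory backend in place of vCenter (none of these helper names are the real vmops API):

```python
import logging

logging.basicConfig(level=logging.INFO)
log = logging.getLogger("destroy-sketch")

# Hypothetical in-memory "vCenter" inventory standing in for the real backend.
backend_vms = {"22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8": "vm-101"}

def destroy_instance(instance_uuid):
    """Destroy a VM, treating 'already gone' as success so terminate is idempotent."""
    vm_ref = backend_vms.get(instance_uuid)  # stand-in for SearchIndex.FindAllByUuid
    if vm_ref is None:
        # Another path (here, the failed-spawn cleanup) already removed the VM:
        # warn and return success, mirroring the WARNING in the log above.
        log.warning("Instance does not exist on backend: %s", instance_uuid)
        return
    # Stand-ins for VirtualMachine.UnregisterVM and DeleteDatastoreFile_Task.
    del backend_vms[instance_uuid]
    log.info("Unregistered and deleted VM %s", vm_ref)

# The first destroy removes the VM; the second finds nothing and still succeeds.
destroy_instance("22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8")
destroy_instance("22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8")
```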
[ 2486.173196] env[62740]: DEBUG nova.virt.vmwareapi.vmops [None req-8be0f0e5-8c87-496f-8823-492d2347f615 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Instance destroyed {{(pid=62740) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2486.173382] env[62740]: INFO nova.compute.manager [None req-8be0f0e5-8c87-496f-8823-492d2347f615 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2486.173634] env[62740]: DEBUG oslo.service.loopingcall [None req-8be0f0e5-8c87-496f-8823-492d2347f615 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62740) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2486.173865] env[62740]: DEBUG nova.compute.manager [-] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Deallocating network for instance {{(pid=62740) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2486.173964] env[62740]: DEBUG nova.network.neutron [-] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] deallocate_for_instance() {{(pid=62740) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2486.191643] env[62740]: DEBUG nova.network.neutron [-] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Instance cache missing network info. {{(pid=62740) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2486.192122] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg d241432c171548cda333cc7d0b581adc in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2486.199425] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d241432c171548cda333cc7d0b581adc [ 2486.199788] env[62740]: DEBUG nova.network.neutron [-] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Updating instance_info_cache with network_info: [] {{(pid=62740) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2486.200175] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 16f94d7f844b4cf29574f7307725aa07 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2486.207223] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 16f94d7f844b4cf29574f7307725aa07 [ 2486.207635] env[62740]: INFO nova.compute.manager [-] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] Took 0.03 seconds to deallocate network for instance.
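Nearly every operation in this trace is bracketed by the same amqpdriver pair: "Expecting reply to msg <id> in queue reply_30cb6e3d754a4ebf9cedab7950709402" when the RPC is sent, and "Received RPC response for msg <id>" when the answer lands. The correlation is by message id: one per-process reply queue serves all concurrent calls, and each caller waits for the reply carrying its own id. A toy, self-contained sketch of that scheme (plain queues and a thread instead of RabbitMQ; this illustrates the pattern, not oslo.messaging's actual implementation):

```python
import queue
import threading
import uuid

reply_queue = queue.Queue()   # one shared reply queue, like reply_30cb6e3d... above
server_inbox = queue.Queue()  # stand-in for the server side's RPC topic

def call(method, **kwargs):
    """Send an RPC request and block until the reply carrying our msg_id arrives."""
    msg_id = uuid.uuid4().hex
    server_inbox.put({"msg_id": msg_id, "method": method, "args": kwargs})
    # "Expecting reply to msg <msg_id> in queue reply_..."
    while True:
        reply = reply_queue.get()
        if reply["msg_id"] == msg_id:
            # "Received RPC response for msg <msg_id>"
            return reply["result"]
        reply_queue.put(reply)  # a reply meant for another concurrent caller

def serve_one():
    """Minimal server: answer a single request on the shared reply queue."""
    req = server_inbox.get()
    reply_queue.put({"msg_id": req["msg_id"], "result": req["args"]["x"] * 2})

threading.Thread(target=serve_one).start()
print(call("double", x=21))  # -> 42
```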
[ 2486.210956] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8be0f0e5-8c87-496f-8823-492d2347f615 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Expecting reply to msg 24fd0ff443d44cd59e9f9324730bce8d in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2486.242653] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 24fd0ff443d44cd59e9f9324730bce8d [ 2486.256833] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8be0f0e5-8c87-496f-8823-492d2347f615 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Expecting reply to msg 42a7a52dab76402cae52c99746c48461 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2486.293734] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 42a7a52dab76402cae52c99746c48461 [ 2486.296619] env[62740]: DEBUG oslo_concurrency.lockutils [None req-8be0f0e5-8c87-496f-8823-492d2347f615 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Lock "22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.267s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2486.297100] env[62740]: INFO oslo_messaging._drivers.amqpdriver [None req-8be0f0e5-8c87-496f-8823-492d2347f615 tempest-ServerShowV254Test-1474381390 tempest-ServerShowV254Test-1474381390-project-member] Expecting reply to msg 8a7f73b8698742849aebfa1e2f3410b6 in queue reply_30cb6e3d754a4ebf9cedab7950709402 [ 2486.298054] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 69.060s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2486.298054] env[62740]: INFO nova.compute.manager [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] [instance: 22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8] During sync_power_state the instance has a pending task (deleting). Skip. [ 2486.298178] env[62740]: DEBUG oslo_concurrency.lockutils [None req-00bab09f-8114-4bd5-b725-ae154da4d1af None None] Lock "22bcbd37-b108-4cd8-bd9a-6f73ebf5f3f8" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=62740) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2486.306465] env[62740]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8a7f73b8698742849aebfa1e2f3410b6
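For reference, the inventory that the report client keeps confirming as unchanged for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0 translates into concrete placement capacity: allocations against a resource class are admitted while used + requested stays within (total - reserved) * allocation_ratio, and no single allocation may exceed max_unit. A quick worked check against the numbers in this log:

```python
# Inventory exactly as logged for provider d02ecd59-4cb8-4d48-91cb-d1e1e7a00ab0.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "min_unit": 1,
                  "max_unit": 16,    "step_size": 1, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                  "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "min_unit": 1,
                  "max_unit": 90,    "step_size": 1, "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    # Placement's effective capacity: (total - reserved) * allocation_ratio.
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: capacity={capacity:.0f}, per-allocation max_unit={inv['max_unit']}")

# VCPU: capacity=192, per-allocation max_unit=16
# MEMORY_MB: capacity=196078, per-allocation max_unit=65530
# DISK_GB: capacity=400, per-allocation max_unit=90
```

Against that capacity, the five active instances (1 VCPU / 128 MB / 1 GB each, per the allocations listed earlier) plus the 512 MB reservation account exactly for the used_ram=1152MB, used_vcpus=5, and used_disk=5GB in the final resource view, which is why the tracker keeps reporting an unchanged inventory.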